diff --git a/clang/include/clang/Basic/riscv_vector.td b/clang/include/clang/Basic/riscv_vector.td --- a/clang/include/clang/Basic/riscv_vector.td +++ b/clang/include/clang/Basic/riscv_vector.td @@ -537,23 +537,51 @@ bit val = !or(!eq(type, "h"), !eq(type, "f"), !eq(type, "d")); } -multiclass RVVVLEBuiltin types> { +let HasNoMaskedOverloaded = false, + ManualCodegen = [{ + IntrinsicTypes = {ResultType, Ops[1]->getType()}; + Ops[0] = Builder.CreateBitCast(Ops[0], ResultType->getPointerTo()); + }], + ManualCodegenMask= [{ + IntrinsicTypes = {ResultType, Ops[3]->getType()}; + Ops[1] = Builder.CreateBitCast(Ops[1], ResultType->getPointerTo()); + }] in { + class RVVVLEMaskBuiltin : RVVBuiltin<"m", "mPCUe", "c"> { + let Name = "vle1_v"; + let IRName = "vle1"; + let HasMask = false; + } + multiclass RVVVLEBuiltin types> { + let Name = NAME # "_v", + IRName = "vle", + IRNameMask ="vle_mask" in { + foreach type = types in { + def : RVVBuiltin<"v", "vPCe", type>; + if !not(IsFloat.val) then { + def : RVVBuiltin<"Uv", "UvPCUe", type>; + } + } + } + } +} + +multiclass RVVVLSEBuiltin types> { let Name = NAME # "_v", - IRName = "vle", - IRNameMask ="vle_mask", + IRName = "vlse", + IRNameMask ="vlse_mask", HasNoMaskedOverloaded = false, ManualCodegen = [{ - IntrinsicTypes = {ResultType, Ops[1]->getType()}; + IntrinsicTypes = {ResultType, Ops[2]->getType()}; Ops[0] = Builder.CreateBitCast(Ops[0], ResultType->getPointerTo()); }], ManualCodegenMask= [{ - IntrinsicTypes = {ResultType, Ops[3]->getType()}; + IntrinsicTypes = {ResultType, Ops[4]->getType()}; Ops[1] = Builder.CreateBitCast(Ops[1], ResultType->getPointerTo()); }] in { foreach type = types in { - def : RVVBuiltin<"v", "vPCe", type>; + def : RVVBuiltin<"v", "vPCet", type>; if !not(IsFloat.val) then { - def : RVVBuiltin<"Uv", "UvPCUe", type>; + def : RVVBuiltin<"Uv", "UvPCUet", type>; } } } @@ -583,29 +611,84 @@ } } -multiclass RVVVSEBuiltin types> { +let HasMaskedOffOperand = false, + PermuteOperands = [1, 0], // C/C++ Operand: (ptr, value, vl). Builtin: (value, ptr, vl) + ManualCodegen = [{ + Ops[1] = Builder.CreateBitCast(Ops[1], Ops[0]->getType()->getPointerTo()); + IntrinsicTypes = {Ops[0]->getType(), Ops[2]->getType()}; + }], + ManualCodegenMask= [{ + Ops[1] = Builder.CreateBitCast(Ops[1], Ops[0]->getType()->getPointerTo()); + IntrinsicTypes = {Ops[0]->getType(), Ops[3]->getType()}; + }] in { + class RVVVSEMaskBuiltin : RVVBuiltin<"m", "0mPUe", "c"> { + let Name = "vse1_v"; + let IRName = "vse1"; + let HasMask = false; + } + multiclass RVVVSEBuiltin types> { + let Name = NAME # "_v", + IRName = "vse", + IRNameMask = "vse_mask" in { + foreach type = types in { + def : RVVBuiltin<"v", "0vPe", type>; + if !not(IsFloat.val) then { + def : RVVBuiltin<"Uv", "0UvPUe", type>; + } + } + } + } +} + +multiclass RVVVSSEBuiltin types> { let Name = NAME # "_v", - IRName = "vse", - IRNameMask = "vse_mask", + IRName = "vsse", + IRNameMask = "vsse_mask", HasMaskedOffOperand = false, - PermuteOperands = [1, 0], // C/C++ Operand: (ptr, value, vl). Builtin: (value, ptr, vl) + PermuteOperands = [1, 2, 0], // C/C++ Operand: (ptr, stride, value, vl). 
Builtin: (value, ptr, stride, vl) ManualCodegen = [{ Ops[1] = Builder.CreateBitCast(Ops[1], Ops[0]->getType()->getPointerTo()); - IntrinsicTypes = {Ops[0]->getType(), Ops[2]->getType()}; + IntrinsicTypes = {Ops[0]->getType(), Ops[3]->getType()}; }], ManualCodegenMask= [{ Ops[1] = Builder.CreateBitCast(Ops[1], Ops[0]->getType()->getPointerTo()); - IntrinsicTypes = {Ops[0]->getType(), Ops[3]->getType()}; + IntrinsicTypes = {Ops[0]->getType(), Ops[4]->getType()}; }] in { foreach type = types in { - def : RVVBuiltin<"v", "0vPe", type>; + def : RVVBuiltin<"v", "0vPet", type>; if !not(IsFloat.val) then { - def : RVVBuiltin<"Uv", "0UvPUe", type>; + def : RVVBuiltin<"Uv", "0UvPUet", type>; } } } } +multiclass RVVIndexedStore { + let HasMaskedOffOperand = false, + PermuteOperands = [1, 2, 0], // C/C++ Operand: (ptr, index, value, vl). Builtin: (value, ptr, index, vl) + ManualCodegen = [{ + Ops[1] = Builder.CreateBitCast(Ops[1],Ops[0]->getType()->getPointerTo()); + IntrinsicTypes = {Ops[0]->getType(), Ops[2]->getType(), Ops[3]->getType()}; + }], + ManualCodegenMask= [{ + Ops[1] = Builder.CreateBitCast(Ops[1], Ops[0]->getType()->getPointerTo()); + IntrinsicTypes = {Ops[0]->getType(), Ops[2]->getType(), Ops[4]->getType()}; + }] in { + foreach type = TypeList in { + foreach eew_list = EEWList in { + defvar eew = eew_list[0]; + defvar eew_type = eew_list[1]; + let Name = op # eew # "_v", IRName = op, IRNameMask = op # "_mask" in { + def : RVVBuiltin<"v", "0vPe" # eew_type # "Uv", type>; + if !not(IsFloat.val) then { + def : RVVBuiltin<"Uv", "0UvPUe" # eew_type # "Uv", type>; + } + } + } + } + } +} + // 6. Configuration-Setting Instructions // 6.1. vsetvli/vsetvl instructions let HasVL = false, @@ -680,20 +763,36 @@ // 7. Vector Loads and Stores // 7.4. Vector Unit-Stride Instructions +def vle1: RVVVLEMaskBuiltin; defm vle8: RVVVLEBuiltin<["c"]>; defm vle16: RVVVLEBuiltin<["s"]>; defm vle32: RVVVLEBuiltin<["i","f"]>; defm vle64: RVVVLEBuiltin<["l","d"]>; +def vse1 : RVVVSEMaskBuiltin; defm vse8 : RVVVSEBuiltin<["c"]>; defm vse16: RVVVSEBuiltin<["s"]>; defm vse32: RVVVSEBuiltin<["i","f"]>; defm vse64: RVVVSEBuiltin<["l","d"]>; +// 7.5. Vector Strided Instructions +defm vlse8: RVVVLSEBuiltin<["c"]>; +defm vlse16: RVVVLSEBuiltin<["s"]>; +defm vlse32: RVVVLSEBuiltin<["i","f"]>; +defm vlse64: RVVVLSEBuiltin<["l","d"]>; + +defm vsse8 : RVVVSSEBuiltin<["c"]>; +defm vsse16: RVVVSSEBuiltin<["s"]>; +defm vsse32: RVVVSSEBuiltin<["i","f"]>; +defm vsse64: RVVVSSEBuiltin<["l","d"]>; + // 7.6. Vector Indexed Instructions defm : RVVIndexedLoad<"vluxei">; defm : RVVIndexedLoad<"vloxei">; +defm : RVVIndexedStore<"vsuxei">; +defm : RVVIndexedStore<"vsoxei">; + // 12. Vector Integer Arithmetic Instructions // 12.1. 
Vector Single-Width Integer Add and Subtract defm vadd : RVVIntBinBuiltinSet; diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vlse.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vlse.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vlse.c @@ -0,0 +1,965 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8:#.*]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8:#.*]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf8_t test_vlse8_v_i8mf8_m(vbool64_t mask, vint8mf8_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf4_t test_vlse8_v_i8mf4_m(vbool32_t mask, vint8mf4_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf2_t test_vlse8_v_i8mf2_m(vbool16_t mask, vint8mf2_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m1_t test_vlse8_v_i8m1_m(vbool8_t mask, vint8m1_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m2_t test_vlse8_v_i8m2_m(vbool4_t mask, vint8m2_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m4_t test_vlse8_v_i8m4_m(vbool2_t mask, vint8m4_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m8_t test_vlse8_v_i8m8_m(vbool1_t mask, 
vint8m8_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16mf4_t test_vlse16_v_i16mf4_m(vbool64_t mask, vint16mf4_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16mf2_t test_vlse16_v_i16mf2_m(vbool32_t mask, vint16mf2_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m1_t test_vlse16_v_i16m1_m(vbool16_t mask, vint16m1_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m2_t test_vlse16_v_i16m2_m(vbool8_t mask, vint16m2_t maskedoff, + 
const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m4_t test_vlse16_v_i16m4_m(vbool4_t mask, vint16m4_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m8_t test_vlse16_v_i16m8_m(vbool2_t mask, vint16m8_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vlse32_v_i32mf2_m(vbool64_t mask, vint32mf2_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vlse32_v_i32m1_m(vbool32_t mask, vint32m1_t maskedoff, + const int32_t *base, 
ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vlse32_v_i32m2_m(vbool16_t mask, vint32m2_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vlse32_v_i32m4_m(vbool8_t mask, vint32m4_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vlse32_v_i32m8_m(vbool4_t mask, vint32m8_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vlse64_v_i64m1_m(vbool64_t mask, vint64m1_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t 
vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vlse64_v_i64m2_m(vbool32_t mask, vint64m2_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vlse64_v_i64m4_m(vbool16_t mask, vint64m4_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vlse64_v_i64m8_m(vbool8_t mask, vint64m8_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf8_t test_vlse8_v_u8mf8_m(vbool64_t mask, vuint8mf8_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, 
maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf4_t test_vlse8_v_u8mf4_m(vbool32_t mask, vuint8mf4_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf2_t test_vlse8_v_u8mf2_m(vbool16_t mask, vuint8mf2_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m1_t test_vlse8_v_u8m1_m(vbool8_t mask, vuint8m1_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m2_t test_vlse8_v_u8m2_m(vbool4_t mask, vuint8m2_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: 
@test_vlse8_v_u8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m4_t test_vlse8_v_u8m4_m(vbool2_t mask, vuint8m4_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m8_t test_vlse8_v_u8m8_m(vbool1_t mask, vuint8m8_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16mf4_t test_vlse16_v_u16mf4_m(vbool64_t mask, vuint16mf4_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16mf2_t test_vlse16_v_u16mf2_m(vbool32_t mask, vuint16mf2_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m1_m( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m1_t test_vlse16_v_u16m1_m(vbool16_t mask, vuint16m1_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m2_t test_vlse16_v_u16m2_m(vbool8_t mask, vuint16m2_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m4_t test_vlse16_v_u16m4_m(vbool4_t mask, vuint16m4_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m8_t test_vlse16_v_u16m8_m(vbool2_t mask, vuint16m8_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32mf2_m( +// CHECK-RV32-NEXT: 
entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vlse32_v_u32mf2_m(vbool64_t mask, vuint32mf2_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vlse32_v_u32m1_m(vbool32_t mask, vuint32m1_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vlse32_v_u32m2_m(vbool16_t mask, vuint32m2_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vlse32_v_u32m4_m(vbool8_t mask, vuint32m4_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// 
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vlse32_v_u32m8_m(vbool4_t mask, vuint32m8_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vlse64_v_u64m1_m(vbool64_t mask, vuint64m1_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vlse64_v_u64m2_m(vbool32_t mask, vuint64m2_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vlse64_v_u64m4_m(vbool16_t mask, vuint64m4_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vlse64_v_u64m8_m(vbool8_t mask, vuint64m8_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vlse32_v_f32mf2_m(vbool64_t mask, vfloat32mf2_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vlse32_v_f32m1_m(vbool32_t mask, vfloat32m1_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vlse32_v_f32m2_m(vbool16_t mask, vfloat32m2_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vlse32_v_f32m4_m(vbool8_t mask, vfloat32m4_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vlse32_v_f32m8_m(vbool4_t mask, vfloat32m8_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vlse64_v_f64m1_m(vbool64_t mask, vfloat64m1_t maskedoff, + const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2f64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2f64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vlse64_v_f64m2_m(vbool32_t mask, vfloat64m2_t maskedoff, + const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4f64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4f64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vlse64_v_f64m4_m(vbool16_t mask, vfloat64m4_t maskedoff, + const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8f64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8f64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vlse64_v_f64m8_m(vbool8_t mask, vfloat64m8_t maskedoff, + const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64(mask, maskedoff, base, bstride, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vse.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vse.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vse.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vse.c @@ -13,13 +13,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8mf8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11:[0-9]+]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11:#.*]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8mf8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11:[0-9]+]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11:#.*]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8mf8(int8_t *base, vint8mf8_t value, size_t vl) { @@ -29,13 +29,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8mf4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8mf4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i8.i64( 
[[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8mf4(int8_t *base, vint8mf4_t value, size_t vl) { @@ -45,13 +45,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8mf2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8mf2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8mf2(int8_t *base, vint8mf2_t value, size_t vl) { @@ -61,13 +61,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m1(int8_t *base, vint8m1_t value, size_t vl) { @@ -77,13 +77,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m2(int8_t *base, vint8m2_t value, size_t vl) { @@ -93,13 +93,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // 
CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m4(int8_t *base, vint8m4_t value, size_t vl) { @@ -109,13 +109,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m8(int8_t *base, vint8m8_t value, size_t vl) { @@ -125,13 +125,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16mf4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16mf4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16mf4(int16_t *base, vint16mf4_t value, size_t vl) { @@ -141,13 +141,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16mf2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16mf2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16mf2(int16_t *base, vint16mf2_t value, size_t vl) { @@ -157,13 +157,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret 
void // void test_vse16_v_i16m1(int16_t *base, vint16m1_t value, size_t vl) { @@ -173,13 +173,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16m2(int16_t *base, vint16m2_t value, size_t vl) { @@ -189,13 +189,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16m4(int16_t *base, vint16m4_t value, size_t vl) { @@ -205,13 +205,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16m8(int16_t *base, vint16m8_t value, size_t vl) { @@ -221,13 +221,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32mf2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32mf2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // 
void test_vse32_v_i32mf2(int32_t *base, vint32mf2_t value, size_t vl) { @@ -237,13 +237,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32m1(int32_t *base, vint32m1_t value, size_t vl) { @@ -253,13 +253,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32m2(int32_t *base, vint32m2_t value, size_t vl) { @@ -269,13 +269,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32m4(int32_t *base, vint32m4_t value, size_t vl) { @@ -285,13 +285,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void 
test_vse32_v_i32m8(int32_t *base, vint32m8_t value, size_t vl) { @@ -301,13 +301,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_i64m1(int64_t *base, vint64m1_t value, size_t vl) { @@ -317,13 +317,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_i64m2(int64_t *base, vint64m2_t value, size_t vl) { @@ -333,13 +333,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_i64m4(int64_t *base, vint64m4_t value, size_t vl) { @@ -349,13 +349,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void 
test_vse64_v_i64m8(int64_t *base, vint64m8_t value, size_t vl) { @@ -365,13 +365,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8mf8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8mf8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8mf8(uint8_t *base, vuint8mf8_t value, size_t vl) { @@ -381,13 +381,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8mf4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8mf4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8mf4(uint8_t *base, vuint8mf4_t value, size_t vl) { @@ -397,13 +397,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8mf2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8mf2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8mf2(uint8_t *base, vuint8mf2_t value, size_t vl) { @@ -413,13 +413,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m1(uint8_t *base, vuint8m1_t value, 
size_t vl) { @@ -429,13 +429,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m2(uint8_t *base, vuint8m2_t value, size_t vl) { @@ -445,13 +445,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m4(uint8_t *base, vuint8m4_t value, size_t vl) { @@ -461,13 +461,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m8(uint8_t *base, vuint8m8_t value, size_t vl) { @@ -477,13 +477,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16mf4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16mf4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16mf4(uint16_t *base, vuint16mf4_t value, size_t vl) { @@ -493,13 +493,13 @@ 
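As a minimal usage sketch of how the builtins exercised by this patch compose, assuming <riscv_vector.h> and that the overloaded names vlse64 and vse64 resolve exactly as in the test signatures shown in these files (column_store itself is a hypothetical helper, not part of the change):

#include <riscv_vector.h>
#include <stddef.h>

/* Hypothetical helper: gather one column of a row-major double matrix with
 * the masked strided load checked in test_vlse64_v_f64m4_m, then write it
 * out contiguously with the unit-stride store checked in this file. */
void column_store(double *dst, const double *mat, ptrdiff_t row_bytes,
                  vbool16_t mask, vfloat64m4_t maskedoff, size_t vl) {
  /* Masked strided load: inactive lanes take their value from maskedoff. */
  vfloat64m4_t col = vlse64(mask, maskedoff, mat, row_bytes, vl);
  /* Unit-stride store of the gathered column. */
  vse64(dst, col, vl);
}

The stride operand is a byte distance, which is why the tests pass a ptrdiff_t bstride straight through to the i32/i64 stride argument of the vlse intrinsic.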
// CHECK-RV32-LABEL: @test_vse16_v_u16mf2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16mf2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16mf2(uint16_t *base, vuint16mf2_t value, size_t vl) { @@ -509,13 +509,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m1(uint16_t *base, vuint16m1_t value, size_t vl) { @@ -525,13 +525,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m2(uint16_t *base, vuint16m2_t value, size_t vl) { @@ -541,13 +541,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m4(uint16_t *base, vuint16m4_t value, size_t vl) { @@ -557,13 +557,13 @@ // 
CHECK-RV32-LABEL: @test_vse16_v_u16m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m8(uint16_t *base, vuint16m8_t value, size_t vl) { @@ -573,13 +573,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32mf2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32mf2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32mf2(uint32_t *base, vuint32mf2_t value, size_t vl) { @@ -589,13 +589,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m1(uint32_t *base, vuint32m1_t value, size_t vl) { @@ -605,13 +605,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m2(uint32_t *base, vuint32m2_t value, size_t vl) { @@ -621,13 +621,13 @@ // 
CHECK-RV32-LABEL: @test_vse32_v_u32m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m4(uint32_t *base, vuint32m4_t value, size_t vl) { @@ -637,13 +637,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m8(uint32_t *base, vuint32m8_t value, size_t vl) { @@ -653,13 +653,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_u64m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m1(uint64_t *base, vuint64m1_t value, size_t vl) { @@ -669,13 +669,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_u64m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m2(uint64_t *base, vuint64m2_t value, size_t vl) { @@ -685,13 +685,13 @@ // 
CHECK-RV32-LABEL: @test_vse64_v_u64m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m4(uint64_t *base, vuint64m4_t value, size_t vl) { @@ -701,13 +701,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_u64m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m8(uint64_t *base, vuint64m8_t value, size_t vl) { @@ -717,13 +717,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32mf2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32mf2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32mf2(float *base, vfloat32mf2_t value, size_t vl) { @@ -733,13 +733,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m1(float *base, vfloat32m1_t value, size_t vl) { @@ -749,13 +749,13 @@ // 
CHECK-RV32-LABEL: @test_vse32_v_f32m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m2(float *base, vfloat32m2_t value, size_t vl) { @@ -765,13 +765,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m4(float *base, vfloat32m4_t value, size_t vl) { @@ -781,13 +781,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv16f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv16f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m8(float *base, vfloat32m8_t value, size_t vl) { @@ -797,13 +797,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_f64m1( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv1f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m1( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv1f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m1(double *base, vfloat64m1_t value, size_t vl) { @@ -813,13 +813,13 
@@ // CHECK-RV32-LABEL: @test_vse64_v_f64m2( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv2f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m2( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv2f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m2(double *base, vfloat64m2_t value, size_t vl) { @@ -829,13 +829,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_f64m4( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv4f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m4( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv4f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m4(double *base, vfloat64m4_t value, size_t vl) { @@ -845,13 +845,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_f64m8( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.nxv8f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m8( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.nxv8f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m8(double *base, vfloat64m8_t value, size_t vl) { @@ -861,13 +861,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8mf8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8mf8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void 
test_vse8_v_i8mf8_m(vbool64_t mask, int8_t *base, vint8mf8_t value, size_t vl) { @@ -877,13 +877,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8mf4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8mf4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8mf4_m(vbool32_t mask, int8_t *base, vint8mf4_t value, size_t vl) { @@ -893,13 +893,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8mf2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8mf2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8mf2_m(vbool16_t mask, int8_t *base, vint8mf2_t value, size_t vl) { @@ -909,13 +909,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m1_m(vbool8_t mask, int8_t *base, vint8m1_t value, size_t vl) { @@ -925,13 +925,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m2_m(vbool4_t mask, int8_t *base, vint8m2_t value, size_t vl) { @@ -941,13 +941,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m4_m(vbool2_t mask, int8_t *base, vint8m4_t value, size_t vl) { @@ -957,13 +957,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_i8m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_i8m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_i8m8_m(vbool1_t mask, int8_t *base, vint8m8_t value, size_t vl) { @@ -973,13 +973,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16mf4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16mf4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16mf4_m(vbool64_t mask, int16_t *base, vint16mf4_t value, size_t vl) { @@ -989,13 +989,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16mf2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void 
@llvm.riscv.vse.mask.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16mf2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16mf2_m(vbool32_t mask, int16_t *base, vint16mf2_t value, size_t vl) { @@ -1005,13 +1005,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16m1_m(vbool16_t mask, int16_t *base, vint16m1_t value, size_t vl) { @@ -1021,13 +1021,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16m2_m(vbool8_t mask, int16_t *base, vint16m2_t value, size_t vl) { @@ -1037,13 +1037,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vse.mask.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16m4_m(vbool4_t mask, int16_t *base, vint16m4_t value, size_t vl) { @@ -1053,13 +1053,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_i16m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_i16m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_i16m8_m(vbool2_t mask, int16_t *base, vint16m8_t value, size_t vl) { @@ -1069,13 +1069,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32mf2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32mf2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32mf2_m(vbool64_t mask, int32_t *base, vint32mf2_t value, size_t vl) { @@ -1085,13 +1085,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32m1_m(vbool32_t mask, int32_t *base, vint32m1_t value, size_t vl) { @@ -1101,13 +1101,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i32( 
[[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32m2_m(vbool16_t mask, int32_t *base, vint32m2_t value, size_t vl) { @@ -1117,13 +1117,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32m4_m(vbool8_t mask, int32_t *base, vint32m4_t value, size_t vl) { @@ -1133,13 +1133,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_i32m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_i32m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_i32m8_m(vbool4_t mask, int32_t *base, vint32m8_t value, size_t vl) { @@ -1149,13 +1149,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_i64m1_m(vbool64_t mask, int64_t *base, 
vint64m1_t value, size_t vl) { @@ -1165,13 +1165,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_i64m2_m(vbool32_t mask, int64_t *base, vint64m2_t value, size_t vl) { @@ -1181,13 +1181,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_i64m4_m(vbool16_t mask, int64_t *base, vint64m4_t value, size_t vl) { @@ -1197,13 +1197,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_i64m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_i64m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_i64m8_m(vbool8_t mask, int64_t *base, vint64m8_t value, size_t vl) { @@ -1213,13 +1213,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8mf8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8mf8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* 
[[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint8mf8_t value, size_t vl) { @@ -1229,13 +1229,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8mf4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8mf4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint8mf4_t value, size_t vl) { @@ -1245,13 +1245,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8mf2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8mf2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint8mf2_t value, size_t vl) { @@ -1261,13 +1261,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint8m1_t value, size_t vl) { @@ -1277,13 +1277,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i32( 
[[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint8m2_t value, size_t vl) { @@ -1293,13 +1293,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint8m4_t value, size_t vl) { @@ -1309,13 +1309,13 @@ // CHECK-RV32-LABEL: @test_vse8_v_u8m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse8_v_u8m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse8_v_u8m8_m(vbool1_t mask, uint8_t *base, vuint8m8_t value, size_t vl) { @@ -1325,13 +1325,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16mf4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16mf4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 
[[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16mf4_m(vbool64_t mask, uint16_t *base, vuint16mf4_t value, size_t vl) { @@ -1341,13 +1341,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16mf2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16mf2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16mf2_m(vbool32_t mask, uint16_t *base, vuint16mf2_t value, size_t vl) { @@ -1357,13 +1357,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint16m1_t value, size_t vl) { @@ -1373,13 +1373,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint16m2_t value, size_t vl) { @@ -1389,13 +1389,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // 
CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint16m4_t value, size_t vl) { @@ -1405,13 +1405,13 @@ // CHECK-RV32-LABEL: @test_vse16_v_u16m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse16_v_u16m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse16_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint16m8_t value, size_t vl) { @@ -1421,13 +1421,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32mf2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32mf2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32mf2_m(vbool64_t mask, uint32_t *base, vuint32mf2_t value, size_t vl) { @@ -1437,13 +1437,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint32m1_t value, size_t vl) { @@ -1453,13 +1453,13 @@ // 
CHECK-RV32-LABEL: @test_vse32_v_u32m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint32m2_t value, size_t vl) { @@ -1469,13 +1469,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint32m4_t value, size_t vl) { @@ -1485,13 +1485,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_u32m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_u32m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint32m8_t value, size_t vl) { @@ -1501,13 +1501,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_u64m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: 
call void @llvm.riscv.vse.mask.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint64m1_t value, size_t vl) { @@ -1517,13 +1517,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_u64m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint64m2_t value, size_t vl) { @@ -1533,13 +1533,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_u64m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint64m4_t value, size_t vl) { @@ -1549,13 +1549,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_u64m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_u64m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint64m8_t value, size_t vl) { @@ -1565,13 +1565,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32mf2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1f32.i32( 
[[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32mf2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32mf2_m(vbool64_t mask, float *base, vfloat32mf2_t value, size_t vl) { @@ -1581,13 +1581,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m1_m(vbool32_t mask, float *base, vfloat32m1_t value, size_t vl) { @@ -1597,13 +1597,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m2_m(vbool16_t mask, float *base, vfloat32m2_t value, size_t vl) { @@ -1613,13 +1613,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8f32.i64( [[VALUE:%.*]], 
* [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m4_m(vbool8_t mask, float *base, vfloat32m4_t value, size_t vl) { @@ -1629,13 +1629,13 @@ // CHECK-RV32-LABEL: @test_vse32_v_f32m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv16f32.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse32_v_f32m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv16f32.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse32_v_f32m8_m(vbool4_t mask, float *base, vfloat32m8_t value, size_t vl) { @@ -1645,13 +1645,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_f64m1_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv1f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m1_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv1f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m1_m(vbool64_t mask, double *base, vfloat64m1_t value, size_t vl) { @@ -1661,13 +1661,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_f64m2_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv2f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m2_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv2f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m2_m(vbool32_t mask, double *base, vfloat64m2_t value, size_t vl) { @@ -1677,13 +1677,13 @@ // CHECK-RV32-LABEL: @test_vse64_v_f64m4_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv4f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 
[[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m4_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv4f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m4_m(vbool16_t mask, double *base, vfloat64m4_t value, size_t vl) { @@ -1693,15 +1693,127 @@ // CHECK-RV32-LABEL: @test_vse64_v_f64m8_m( // CHECK-RV32-NEXT: entry: // CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV32-NEXT: call void @llvm.riscv.vse.mask.nxv8f64.i32( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR11]] // CHECK-RV32-NEXT: ret void // // CHECK-RV64-LABEL: @test_vse64_v_f64m8_m( // CHECK-RV64-NEXT: entry: // CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * -// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) #[[ATTR11]] +// CHECK-RV64-NEXT: call void @llvm.riscv.vse.mask.nxv8f64.i64( [[VALUE:%.*]], * [[TMP0]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR11]] // CHECK-RV64-NEXT: ret void // void test_vse64_v_f64m8_m(vbool8_t mask, double *base, vfloat64m8_t value, size_t vl) { return vse64(mask, base, value, vl); } + +// CHECK-RV32-LABEL: @test_vse1_v_b1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vse1.nxv64i1.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vse1_v_b1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vse1.nxv64i1.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV64-NEXT: ret void +// +void test_vse1_v_b1(uint8_t *base, vbool1_t value, size_t vl) { + return vse1(base, value, vl); +} + +// CHECK-RV32-LABEL: @test_vse1_v_b2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vse1.nxv32i1.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vse1_v_b2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vse1.nxv32i1.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV64-NEXT: ret void +// +void test_vse1_v_b2(uint8_t *base, vbool2_t value, size_t vl) { + return vse1(base, value, vl); +} + +// CHECK-RV32-LABEL: @test_vse1_v_b4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vse1.nxv16i1.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vse1_v_b4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vse1.nxv16i1.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV64-NEXT: ret void +// +void test_vse1_v_b4(uint8_t *base, 
vbool4_t value, size_t vl) { + return vse1(base, value, vl); +} + +// CHECK-RV32-LABEL: @test_vse1_v_b8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vse1.nxv8i1.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vse1_v_b8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vse1.nxv8i1.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV64-NEXT: ret void +// +void test_vse1_v_b8(uint8_t *base, vbool8_t value, size_t vl) { + return vse1(base, value, vl); +} + +// CHECK-RV32-LABEL: @test_vse1_v_b16( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vse1.nxv4i1.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vse1_v_b16( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vse1.nxv4i1.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV64-NEXT: ret void +// +void test_vse1_v_b16(uint8_t *base, vbool16_t value, size_t vl) { + return vse1(base, value, vl); +} + +// CHECK-RV32-LABEL: @test_vse1_v_b32( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vse1.nxv2i1.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vse1_v_b32( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vse1.nxv2i1.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV64-NEXT: ret void +// +void test_vse1_v_b32(uint8_t *base, vbool32_t value, size_t vl) { + return vse1(base, value, vl); +} + +// CHECK-RV32-LABEL: @test_vse1_v_b64( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vse1.nxv1i1.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vse1_v_b64( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vse1.nxv1i1.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[VL:%.*]]) [[ATTR11]] +// CHECK-RV64-NEXT: ret void +// +void test_vse1_v_b64(uint8_t *base, vbool64_t value, size_t vl) { + return vse1(base, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsoxei.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsoxei.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsoxei.c @@ -0,0 +1,6523 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 
-triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include <riscv_vector.h> + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8:#.*]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8:#.*]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8mf8(int8_t *base, vuint8mf8_t bindex, vint8mf8_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8mf4(int8_t *base, vuint8mf4_t bindex, vint8mf4_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8mf2(int8_t *base, vuint8mf2_t bindex, vint8mf2_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 
[[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf8(int8_t *base, vuint16mf4_t bindex, vint8mf8_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf4(int8_t *base, vuint16mf2_t bindex, vint8mf4_t value, + size_t vl) { + return vsoxei16(base, 
bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf2(int8_t *base, vuint16m1_t bindex, vint8mf2_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m4(int8_t *base, vuint16m8_t bindex, vint8m4_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * 
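// The overloaded vsoxei* forms exercised in this file pick the IR intrinsic
// from the operand types: each element of `value` is stored, in element
// order, to the byte address base + bindex[i]. A minimal strip-mined usage
// sketch; the helper name `scatter_i8` and its loop are illustrative only,
// not part of this patch:
//
//   void scatter_i8(int8_t *base, const uint8_t *offsets,
//                   const int8_t *src, size_t n) {
//     for (size_t done = 0; done < n;) {
//       size_t vl = vsetvl_e8m1(n - done);                // elements this pass
//       vuint8m1_t idx = vle8_v_u8m1(offsets + done, vl); // byte offsets
//       vint8m1_t val = vle8_v_i8m1(src + done, vl);      // data to scatter
//       vsoxei8(base, idx, val, vl);   // overloaded form of vsoxei8_v_i8m1
//       done += vl;
//     }
//   }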
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf8(int8_t *base, vuint32mf2_t bindex, vint8mf8_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf4(int8_t *base, vuint32m1_t bindex, vint8mf4_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf2(int8_t *base, vuint32m2_t bindex, vint8mf2_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: 
call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i8mf8(int8_t *base, vuint64m1_t bindex, vint8mf8_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i8mf4(int8_t *base, vuint64m2_t bindex, vint8mf4_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i8mf2(int8_t *base, vuint64m4_t bindex, vint8mf2_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i8m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value,
+                          size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i16mf4(int16_t *base, vuint8mf8_t bindex, vint16mf4_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i16mf2(int16_t *base, vuint8mf4_t bindex, vint16mf2_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i16m1(int16_t *base, vuint8mf2_t bindex, vint16m1_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i16mf4(int16_t *base, vuint16mf4_t bindex,
+                            vint16mf4_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i16mf2(int16_t *base, vuint16mf2_t bindex,
+                            vint16mf2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i16mf4(int16_t *base, vuint32mf2_t bindex,
+                            vint16mf4_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i16mf2(int16_t *base, vuint32m1_t bindex,
+                            vint16mf2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i16mf4(int16_t *base, vuint64m1_t bindex,
+                            vint16mf4_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i16mf2(int16_t *base, vuint64m2_t bindex,
+                            vint16mf2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i32mf2(int32_t *base, vuint8mf8_t bindex, vint32mf2_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i32m1(int32_t *base, vuint8mf4_t bindex, vint32m1_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i32m2(int32_t *base, vuint8mf2_t bindex, vint32m2_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i32mf2(int32_t *base, vuint16mf4_t bindex,
+                            vint32mf2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i32m1(int32_t *base, vuint16mf2_t bindex, vint32m1_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i32m4(int32_t *base, vuint16m2_t bindex, vint32m4_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i32mf2(int32_t *base, vuint32mf2_t bindex,
+                            vint32mf2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i32mf2(int32_t *base, vuint64m1_t bindex,
+                            vint32mf2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i32m1(int32_t *base, vuint64m2_t bindex, vint32m1_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i64m1(int64_t *base, vuint8mf8_t bindex, vint64m1_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i64m2(int64_t *base, vuint8mf4_t bindex, vint64m2_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i64m4(int64_t *base, vuint8mf2_t bindex, vint64m4_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value,
+                          size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i64m1(int64_t *base, vuint16mf4_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i64m2(int64_t *base, vuint16mf2_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i64m1(int64_t *base, vuint32mf2_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:
call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: 
ret void +// +void test_vsoxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8mf8(uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8mf4(uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8mf2(uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: 
@test_vsoxei8_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8mf8(uint8_t *base, vuint16mf4_t bindex, + vuint8mf8_t value, size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8mf4(uint8_t *base, vuint16mf2_t bindex, + vuint8mf4_t value, size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf2( 
+// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8mf2(uint8_t *base, vuint16m1_t bindex, vuint8mf2_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value, + size_t vl) { + return vsoxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u8mf8(uint8_t *base, vuint32mf2_t bindex, + vuint8mf8_t value, size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u8mf4(uint8_t *base, vuint32m1_t bindex, vuint8mf4_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u8mf2(uint8_t *base, vuint32m2_t bindex, vuint8mf2_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value, + size_t vl) { + return vsoxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsoxei.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u8mf8(uint8_t *base, vuint64m1_t bindex, vuint8mf8_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u8mf4(uint8_t *base, vuint64m2_t bindex, vuint8mf4_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u8mf2(uint8_t *base, vuint64m4_t bindex, vuint8mf2_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value, + size_t vl) { + return vsoxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsoxei8_v_u16mf4(uint16_t *base, vuint8mf8_t bindex, + vuint16mf4_t value, size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u16mf2(uint16_t *base, vuint8mf4_t bindex, + vuint16mf2_t value, size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u16m1(uint16_t *base, vuint8mf2_t bindex, vuint16m1_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t value, + size_t vl) { + return vsoxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// 
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16mf4(uint16_t *base, vuint16mf4_t bindex,
+                           vuint16mf4_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16mf2(uint16_t *base, vuint16mf2_t bindex,
+                           vuint16mf2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex,
+                           vuint16m4_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex,
+                           vuint16m8_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16mf4(uint16_t *base, vuint32mf2_t bindex,
+                           vuint16mf4_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16mf2(uint16_t *base, vuint32m1_t bindex,
+                           vuint16mf2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16m2(uint16_t *base, vuint32m4_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex,
+                           vuint16m4_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16mf4(uint16_t *base, vuint64m1_t bindex,
+                           vuint16mf4_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16mf2(uint16_t *base, vuint64m2_t bindex,
+                           vuint16mf2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32mf2(uint32_t *base, vuint8mf8_t bindex,
+                           vuint32mf2_t value, size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m1(uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m2(uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32mf2(uint32_t *base, vuint16mf4_t bindex,
+                           vuint32mf2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m1(uint32_t *base, vuint16mf2_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32mf2(uint32_t *base, vuint32mf2_t bindex,
+                           vuint32mf2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
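+// vsoxei is the ordered variant of the indexed store (element stores are
+// performed in element order); the vsuxei tests elsewhere in this patch
+// exercise the unordered variant through the same RVVIndexedStore multiclass,
+// so the generated IR differs only in the intrinsic name.
+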
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32mf2(uint32_t *base, vuint64m1_t bindex,
+                           vuint32mf2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m1(uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m2(uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m4(uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, vuint64m8_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m1(uint64_t *base, vuint16mf4_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m2(uint64_t *base, vuint16mf2_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m8(uint64_t *base, vuint16m2_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m1(uint64_t *base, vuint32mf2_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m8(uint64_t *base, vuint32m4_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m8(uint64_t *base, vuint64m8_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
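+// The floating-point cases below follow the same pattern (an illustrative
+// note, not generated output): only the data type in the mangled name
+// switches to nxvNf32/nxvNf64, i.e. <vscale x N x float> or
+// <vscale x N x double>, while the index operand stays an unsigned integer
+// vector of the ei8/ei16/ei32/ei64 width, e.g.:
+//   call void @llvm.riscv.vsoxei.nxv1f32.nxv1i8.i64(
+//       <vscale x 1 x float> %value, <vscale x 1 x float>* %base,
+//       <vscale x 1 x i8> %bindex, i64 %vl)
+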
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i8.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32mf2(float *base, vuint8mf8_t bindex, vfloat32mf2_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i8.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m1(float *base, vuint8mf4_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i8.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m2(float *base, vuint8mf2_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i8.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i8.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i8.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m8(float *base, vuint8m2_t bindex, vfloat32m8_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i16.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32mf2(float *base, vuint16mf4_t bindex,
+                           vfloat32mf2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i16.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32m1(float *base, vuint16mf2_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i16.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i16.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i16.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i32.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_f32mf2(float *base, vuint32mf2_t bindex,
+                           vfloat32mf2_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i32.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i32.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i32.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i32.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i64.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_f32mf2(float *base, vuint64m1_t bindex,
+                           vfloat32mf2_t value, size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i64.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i64.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i64.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i8.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f64m1(double *base, vuint8mf8_t bindex, vfloat64m1_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i8.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f64m2(double *base, vuint8mf4_t bindex, vfloat64m2_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i8.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f64m4(double *base, vuint8mf2_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i8.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i8.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsoxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i16.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f64m1(double *base, vuint16mf4_t bindex,
+                           vfloat64m1_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i16.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f64m2(double *base, vuint16mf2_t bindex,
+                           vfloat64m2_t value, size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i16.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i16.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+void test_vsoxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsoxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv1f64.nxv1i32.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_f64m1(double *base, vuint32mf2_t bindex,
+                           vfloat64m1_t value, size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv2f64.nxv2i32.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv4f64.nxv4i32.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv8f64.nxv8i32.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsoxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv1f64.nxv1i64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv2f64.nxv2i64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv4f64.nxv4i64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv8f64.nxv8i64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsoxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint8mf8_t bindex,
+                            vint8mf8_t value, size_t vl) {
+  return vsoxei8(mask, base,
bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint8mf4_t bindex, + vint8mf4_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint8mf2_t bindex, + vint8mf2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m1_m(vbool8_t mask, int8_t *base, vuint8m1_t bindex, + vint8m1_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m2_m(vbool4_t mask, int8_t *base, vuint8m2_t bindex, + vint8m2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i32( 
[[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m4_m(vbool2_t mask, int8_t *base, vuint8m4_t bindex, + vint8m4_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m8_m(vbool1_t mask, int8_t *base, vuint8m8_t bindex, + vint8m8_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint16mf4_t bindex, + vint8mf8_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint16mf2_t bindex, + vint8mf4_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint16m1_t bindex, + vint8mf2_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m1_m(vbool8_t mask, int8_t *base, vuint16m2_t bindex, + vint8m1_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m2_m(vbool4_t mask, int8_t *base, vuint16m4_t bindex, + vint8m2_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m4_m(vbool2_t mask, int8_t *base, vuint16m8_t bindex, + vint8m4_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// 
CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint32mf2_t bindex, + vint8mf8_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint32m1_t bindex, + vint8mf4_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint32m2_t bindex, + vint8mf2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m1_m(vbool8_t mask, int8_t *base, vuint32m4_t bindex, + vint8m1_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m2_m(vbool4_t mask, int8_t *base, vuint32m8_t bindex, + vint8m2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint64m1_t bindex, + vint8mf8_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint64m2_t bindex, + vint8mf4_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint64m4_t bindex, + vint8mf2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8m1_m(vbool8_t mask, int8_t *base, vuint64m8_t bindex, + vint8m1_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i32( 
[[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint8mf8_t bindex, + vint16mf4_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint8mf4_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m1_m(vbool16_t mask, int16_t *base, vuint8mf2_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m2_m(vbool8_t mask, int16_t *base, vuint8m1_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m4_m(vbool4_t mask, int16_t *base, vuint8m2_t bindex, + vint16m4_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m8_m(vbool2_t mask, int16_t *base, vuint8m4_t bindex, + vint16m8_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint16mf4_t bindex, vint16mf4_t value, + size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16mf2_m(vbool32_t mask, int16_t *base, + vuint16mf2_t bindex, vint16mf2_t value, + size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], 
[[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m1_m(vbool16_t mask, int16_t *base, vuint16m1_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m2_m(vbool8_t mask, int16_t *base, vuint16m2_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m4_m(vbool4_t mask, int16_t *base, vuint16m4_t bindex, + vint16m4_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m8_m(vbool2_t mask, int16_t *base, vuint16m8_t bindex, + vint16m8_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint32mf2_t bindex, vint16mf4_t value, + 
size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint32m1_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m1_m(vbool16_t mask, int16_t *base, vuint32m2_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m2_m(vbool8_t mask, int16_t *base, vuint32m4_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m4_m(vbool4_t mask, int16_t *base, vuint32m8_t bindex, + vint16m4_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint64m1_t bindex, + vint16mf4_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint64m2_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16m1_m(vbool16_t mask, int16_t *base, vuint64m4_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16m2_m(vbool8_t mask, int16_t *base, vuint64m8_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) 
[[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint8mf8_t bindex, + vint32mf2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m1_m(vbool32_t mask, int32_t *base, vuint8mf4_t bindex, + vint32m1_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m2_m(vbool16_t mask, int32_t *base, vuint8mf2_t bindex, + vint32m2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m4_m(vbool8_t mask, int32_t *base, vuint8m1_t bindex, + vint32m4_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call 
void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m8_m(vbool4_t mask, int32_t *base, vuint8m2_t bindex, + vint32m8_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint16mf4_t bindex, vint32mf2_t value, + size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32m1_m(vbool32_t mask, int32_t *base, vuint16mf2_t bindex, + vint32m1_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32m2_m(vbool16_t mask, int32_t *base, vuint16m1_t bindex, + vint32m2_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsoxei16_v_i32m4_m(vbool8_t mask, int32_t *base, vuint16m2_t bindex, + vint32m4_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32m8_m(vbool4_t mask, int32_t *base, vuint16m4_t bindex, + vint32m8_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint32mf2_t bindex, vint32mf2_t value, + size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m1_m(vbool32_t mask, int32_t *base, vuint32m1_t bindex, + vint32m1_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m2_m(vbool16_t mask, int32_t *base, vuint32m2_t bindex, + vint32m2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsoxei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m4_m(vbool8_t mask, int32_t *base, vuint32m4_t bindex, + vint32m4_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m8_m(vbool4_t mask, int32_t *base, vuint32m8_t bindex, + vint32m8_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint64m1_t bindex, + vint32mf2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i32m1_m(vbool32_t mask, int32_t *base, vuint64m2_t bindex, + vint32m1_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i32m2_m(vbool16_t mask, int32_t *base, vuint64m4_t bindex, + vint32m2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i32m4_m(vbool8_t mask, int32_t *base, vuint64m8_t bindex, + vint32m4_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i64m1_m(vbool64_t mask, int64_t *base, vuint8mf8_t bindex, + vint64m1_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i64m2_m(vbool32_t mask, int64_t *base, vuint8mf4_t bindex, + vint64m2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: 
@test_vsoxei8_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i64m4_m(vbool16_t mask, int64_t *base, vuint8mf2_t bindex,
+                            vint64m4_t value, size_t vl) {
+  return vsoxei8(mask, base, bindex, value, vl);
+}
+
+// [... test_vsoxei8_v_i64m8_m through test_vsoxei8_v_u64m8_m: the same masked
+// CHECK-RV32/CHECK-RV64 pattern repeated mechanically for the remaining
+// vsoxei8/16/32/64 signed and unsigned element-type/LMUL combinations ...]
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void 
@llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint16mf4_t bindex, vuint64m1_t value, + size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m2_m(vbool32_t mask, uint64_t *base, + vuint16mf2_t bindex, vuint64m2_t value, + size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint16m1_t bindex, + vuint64m4_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint16m2_t bindex, + vuint64m8_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsoxei32_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint32mf2_t bindex, vuint64m1_t value, + size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint32m1_t bindex, + vuint64m2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint32m2_t bindex, + vuint64m4_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint32m4_t bindex, + vuint64m8_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint64m1_t bindex, + vuint64m1_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsoxei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint64m2_t bindex, + vuint64m2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint64m4_t bindex, + vuint64m4_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint64m8_t bindex, + vuint64m8_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32mf2_m(vbool64_t mask, float *base, vuint8mf8_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsoxei.mask.nxv2f32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m1_m(vbool32_t mask, float *base, vuint8mf4_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m2_m(vbool16_t mask, float *base, vuint8mf2_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m4_m(vbool8_t mask, float *base, vuint8m1_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m8_m(vbool4_t mask, float *base, vuint8m2_t bindex, + vfloat32m8_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: 
@test_vsoxei16_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32mf2_m(vbool64_t mask, float *base, vuint16mf4_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32m1_m(vbool32_t mask, float *base, vuint16mf2_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32m2_m(vbool16_t mask, float *base, vuint16m1_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32m4_m(vbool8_t mask, float *base, vuint16m2_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsoxei.mask.nxv16f32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32m8_m(vbool4_t mask, float *base, vuint16m4_t bindex, + vfloat32m8_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32mf2_m(vbool64_t mask, float *base, vuint32mf2_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m1_m(vbool32_t mask, float *base, vuint32m1_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m2_m(vbool16_t mask, float *base, vuint32m2_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsoxei32_v_f32m4_m(vbool8_t mask, float *base, vuint32m4_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m8_m(vbool4_t mask, float *base, vuint32m8_t bindex, + vfloat32m8_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32mf2_m(vbool64_t mask, float *base, vuint64m1_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m1_m(vbool32_t mask, float *base, vuint64m2_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m2_m(vbool16_t mask, float *base, vuint64m4_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsoxei64_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m4_m(vbool8_t mask, float *base, vuint64m8_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m1_m(vbool64_t mask, double *base, vuint8mf8_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m2_m(vbool32_t mask, double *base, vuint8mf4_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m4_m(vbool16_t mask, double *base, vuint8mf2_t bindex, + vfloat64m4_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m8_m(vbool8_t mask, double *base, vuint8m1_t bindex, + vfloat64m8_t value, size_t vl) { + return vsoxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m1_m(vbool64_t mask, double *base, vuint16mf4_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m2_m(vbool32_t mask, double *base, vuint16mf2_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m4_m(vbool16_t mask, double *base, vuint16m1_t bindex, + vfloat64m4_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// 
CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m8_m(vbool8_t mask, double *base, vuint16m2_t bindex, + vfloat64m8_t value, size_t vl) { + return vsoxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m1_m(vbool64_t mask, double *base, vuint32mf2_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m2_m(vbool32_t mask, double *base, vuint32m1_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m4_m(vbool16_t mask, double *base, vuint32m2_t bindex, + vfloat64m4_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: 
call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m8_m(vbool8_t mask, double *base, vuint32m4_t bindex, + vfloat64m8_t value, size_t vl) { + return vsoxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f64m1_m(vbool64_t mask, double *base, vuint64m1_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f64m2_m(vbool32_t mask, double *base, vuint64m2_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f64m4_m(vbool16_t mask, double *base, vuint64m4_t bindex, + vfloat64m4_t value, size_t vl) { + return vsoxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret 
void
+//
+void test_vsoxei64_v_f64m8_m(vbool8_t mask, double *base, vuint64m8_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsoxei64(mask, base, bindex, value, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsse.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsse.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsse.c
@@ -0,0 +1,1813 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8:#.*]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8:#.*]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8mf8(int8_t *base, ptrdiff_t bstride, vint8mf8_t value,
+                        size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8mf4(int8_t *base, ptrdiff_t bstride, vint8mf4_t value,
+                        size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8mf2(int8_t *base, ptrdiff_t bstride, vint8mf2_t value,
+                        size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
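The tests that follow all exercise the same overloaded shape: the strided store takes (base pointer, byte stride as ptrdiff_t, vector value, vl) and lowers to @llvm.riscv.vsse.<value type>.<XLEN type>. A minimal usage sketch, assuming only the overloaded vsse8 shown in the tests above; the function name and the stride of 2 are illustrative, not part of this patch:

#include <riscv_vector.h>

// Store the first vl elements of v to every second byte starting at dst,
// i.e. element i of v goes to dst[2 * i].
void store_every_other_byte(int8_t *dst, vint8m1_t v, size_t vl) {
  vsse8(dst, 2 /* ptrdiff_t bstride, in bytes */, v, vl);
}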
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m1(int8_t *base, ptrdiff_t bstride, vint8m1_t value, + size_t vl) { + return vsse8(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m2(int8_t *base, ptrdiff_t bstride, vint8m2_t value, + size_t vl) { + return vsse8(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m4(int8_t *base, ptrdiff_t bstride, vint8m4_t value, + size_t vl) { + return vsse8(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m8(int8_t *base, ptrdiff_t bstride, vint8m8_t value, + size_t vl) { + return vsse8(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsse16_v_i16mf4(int16_t *base, ptrdiff_t bstride, vint16mf4_t value, + size_t vl) { + return vsse16(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16mf2(int16_t *base, ptrdiff_t bstride, vint16mf2_t value, + size_t vl) { + return vsse16(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16m1(int16_t *base, ptrdiff_t bstride, vint16m1_t value, + size_t vl) { + return vsse16(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16m2(int16_t *base, ptrdiff_t bstride, vint16m2_t value, + size_t vl) { + return vsse16(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16m4(int16_t *base, ptrdiff_t bstride, vint16m4_t value, + size_t vl) { + return vsse16(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m8( +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m8(int16_t *base, ptrdiff_t bstride, vint16m8_t value,
+                         size_t vl) {
+  return vsse16(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32mf2(int32_t *base, ptrdiff_t bstride, vint32mf2_t value,
+                          size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m1(int32_t *base, ptrdiff_t bstride, vint32m1_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m2(int32_t *base, ptrdiff_t bstride, vint32m2_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m4(int32_t *base, ptrdiff_t bstride, vint32m4_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m8(int32_t *base, ptrdiff_t bstride, vint32m8_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m1(int64_t *base, ptrdiff_t bstride, vint64m1_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m2(int64_t *base, ptrdiff_t bstride, vint64m2_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m4(int64_t *base, ptrdiff_t bstride, vint64m4_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m8(int64_t *base, ptrdiff_t bstride, vint64m8_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf8(uint8_t *base, ptrdiff_t bstride, vuint8mf8_t value,
+                        size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf4(uint8_t *base, ptrdiff_t bstride, vuint8mf4_t value,
+                        size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf2(uint8_t *base, ptrdiff_t bstride, vuint8mf2_t value,
+                        size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m1(uint8_t *base, ptrdiff_t bstride, vuint8m1_t value,
+                       size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m2(uint8_t *base, ptrdiff_t bstride, vuint8m2_t value,
+                       size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m4(uint8_t *base, ptrdiff_t bstride, vuint8m4_t value,
+                       size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m8(uint8_t *base, ptrdiff_t bstride, vuint8m8_t value,
+                       size_t vl) {
+  return vsse8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16mf4(uint16_t *base, ptrdiff_t bstride, vuint16mf4_t value,
+                          size_t vl) {
+  return vsse16(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16mf2(uint16_t *base, ptrdiff_t bstride, vuint16mf2_t value,
+                          size_t vl) {
+  return vsse16(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m1(uint16_t *base, ptrdiff_t bstride, vuint16m1_t value,
+                         size_t vl) {
+  return vsse16(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m2(uint16_t *base, ptrdiff_t bstride, vuint16m2_t value,
+                         size_t vl) {
+  return vsse16(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m4(uint16_t *base, ptrdiff_t bstride, vuint16m4_t value,
+                         size_t vl) {
+  return vsse16(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m8(uint16_t *base, ptrdiff_t bstride, vuint16m8_t value,
+                         size_t vl) {
+  return vsse16(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32mf2(uint32_t *base, ptrdiff_t bstride, vuint32mf2_t value,
+                          size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m1(uint32_t *base, ptrdiff_t bstride, vuint32m1_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m2(uint32_t *base, ptrdiff_t bstride, vuint32m2_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m4(uint32_t *base, ptrdiff_t bstride, vuint32m4_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m8(uint32_t *base, ptrdiff_t bstride, vuint32m8_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m1(uint64_t *base, ptrdiff_t bstride, vuint64m1_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m2(uint64_t *base, ptrdiff_t bstride, vuint64m2_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m4(uint64_t *base, ptrdiff_t bstride, vuint64m4_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m8(uint64_t *base, ptrdiff_t bstride, vuint64m8_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1f32.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1f32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32mf2(float *base, ptrdiff_t bstride, vfloat32mf2_t value,
+                          size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2f32.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2f32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m1(float *base, ptrdiff_t bstride, vfloat32m1_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4f32.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4f32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m2(float *base, ptrdiff_t bstride, vfloat32m2_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8f32.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8f32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m4(float *base, ptrdiff_t bstride, vfloat32m4_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16f32.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16f32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m8(float *base, ptrdiff_t bstride, vfloat32m8_t value,
+                         size_t vl) {
+  return vsse32(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1f64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1f64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m1(double *base, ptrdiff_t bstride, vfloat64m1_t value,
+                         size_t vl) {
+  return vsse64(base, bstride, value, vl);
+}
+ +// CHECK-RV32-LABEL: @test_vsse64_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv2f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv2f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_f64m2(double *base, ptrdiff_t bstride, vfloat64m2_t value, + size_t vl) { + return vsse64(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv4f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv4f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_f64m4(double *base, ptrdiff_t bstride, vfloat64m4_t value, + size_t vl) { + return vsse64(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.nxv8f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.nxv8f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_f64m8(double *base, ptrdiff_t bstride, vfloat64m8_t value, + size_t vl) { + return vsse64(base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8mf8_m(vbool64_t mask, int8_t *base, ptrdiff_t bstride, + vint8mf8_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8mf4_m(vbool32_t mask, int8_t *base, ptrdiff_t bstride, + vint8mf4_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8mf2_m(vbool16_t mask, int8_t *base, ptrdiff_t bstride, + vint8mf2_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m1_m(vbool8_t mask, int8_t *base, ptrdiff_t bstride, + vint8m1_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m2_m(vbool4_t mask, int8_t *base, ptrdiff_t bstride, + vint8m2_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m4_m(vbool2_t mask, int8_t *base, ptrdiff_t bstride, + vint8m4_t value, size_t 
vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_i8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_i8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_i8m8_m(vbool1_t mask, int8_t *base, ptrdiff_t bstride, + vint8m8_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16mf4_m(vbool64_t mask, int16_t *base, ptrdiff_t bstride, + vint16mf4_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16mf2_m(vbool32_t mask, int16_t *base, ptrdiff_t bstride, + vint16mf2_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16m1_m(vbool16_t mask, int16_t *base, ptrdiff_t bstride, + vint16m1_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i16.i32( 
[[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16m2_m(vbool8_t mask, int16_t *base, ptrdiff_t bstride, + vint16m2_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16m4_m(vbool4_t mask, int16_t *base, ptrdiff_t bstride, + vint16m4_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_i16m8_m(vbool2_t mask, int16_t *base, ptrdiff_t bstride, + vint16m8_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_i32mf2_m(vbool64_t mask, int32_t *base, ptrdiff_t bstride, + vint32mf2_t value, size_t vl) { + return vsse32(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] 
to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_i32m1_m(vbool32_t mask, int32_t *base, ptrdiff_t bstride, + vint32m1_t value, size_t vl) { + return vsse32(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_i32m2_m(vbool16_t mask, int32_t *base, ptrdiff_t bstride, + vint32m2_t value, size_t vl) { + return vsse32(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_i32m4_m(vbool8_t mask, int32_t *base, ptrdiff_t bstride, + vint32m4_t value, size_t vl) { + return vsse32(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_i32m8_m(vbool4_t mask, int32_t *base, ptrdiff_t bstride, + vint32m8_t value, size_t vl) { + return vsse32(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_i64m1_m(vbool64_t mask, int64_t *base, ptrdiff_t 
bstride, + vint64m1_t value, size_t vl) { + return vsse64(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_i64m2_m(vbool32_t mask, int64_t *base, ptrdiff_t bstride, + vint64m2_t value, size_t vl) { + return vsse64(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_i64m4_m(vbool16_t mask, int64_t *base, ptrdiff_t bstride, + vint64m4_t value, size_t vl) { + return vsse64(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_i64m8_m(vbool8_t mask, int64_t *base, ptrdiff_t bstride, + vint64m8_t value, size_t vl) { + return vsse64(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8mf8_m(vbool64_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8mf8_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsse.mask.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8mf4_m(vbool32_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8mf4_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8mf2_m(vbool16_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8mf2_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8m1_m(vbool8_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8m1_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_u8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8m2_m(vbool4_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8m2_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_u8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8m4_m(vbool2_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8m4_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse8_v_u8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8m8_m(vbool1_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8m8_t value, size_t vl) { + return vsse8(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16mf4_m(vbool64_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16mf4_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16mf2_m(vbool32_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16mf2_t value, size_t vl) { + return vsse16(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16m1_m(vbool16_t mask, uint16_t *base, ptrdiff_t bstride, + 
vuint16m1_t value, size_t vl) {
+  return vsse16(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m2_m(vbool8_t mask, uint16_t *base, ptrdiff_t bstride,
+                           vuint16m2_t value, size_t vl) {
+  return vsse16(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m4_m(vbool4_t mask, uint16_t *base, ptrdiff_t bstride,
+                           vuint16m4_t value, size_t vl) {
+  return vsse16(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m8_m(vbool2_t mask, uint16_t *base, ptrdiff_t bstride,
+                           vuint16m8_t value, size_t vl) {
+  return vsse16(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32mf2_m(vbool64_t mask, uint32_t *base, ptrdiff_t bstride,
+                            vuint32mf2_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m1_m(vbool32_t mask, uint32_t *base, ptrdiff_t bstride,
+                           vuint32m1_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m2_m(vbool16_t mask, uint32_t *base, ptrdiff_t bstride,
+                           vuint32m2_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m4_m(vbool8_t mask, uint32_t *base, ptrdiff_t bstride,
+                           vuint32m4_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m8_m(vbool4_t mask, uint32_t *base, ptrdiff_t bstride,
+                           vuint32m8_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m1_m(vbool64_t mask, uint64_t *base, ptrdiff_t bstride,
+                           vuint64m1_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m2_m(vbool32_t mask, uint64_t *base, ptrdiff_t bstride,
+                           vuint64m2_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m4_m(vbool16_t mask, uint64_t *base, ptrdiff_t bstride,
+                           vuint64m4_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m8_m(vbool8_t mask, uint64_t *base, ptrdiff_t bstride,
+                           vuint64m8_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1f32.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1f32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32mf2_m(vbool64_t mask, float *base, ptrdiff_t bstride,
+                            vfloat32mf2_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2f32.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2f32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m1_m(vbool32_t mask, float *base, ptrdiff_t bstride,
+                           vfloat32m1_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4f32.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4f32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m2_m(vbool16_t mask, float *base, ptrdiff_t bstride,
+                           vfloat32m2_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8f32.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8f32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m4_m(vbool8_t mask, float *base, ptrdiff_t bstride,
+                           vfloat32m4_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv16f32.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv16f32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m8_m(vbool4_t mask, float *base, ptrdiff_t bstride,
+                           vfloat32m8_t value, size_t vl) {
+  return vsse32(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1f64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1f64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m1_m(vbool64_t mask, double *base, ptrdiff_t bstride,
+                           vfloat64m1_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2f64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2f64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m2_m(vbool32_t mask, double *base, ptrdiff_t bstride,
+                           vfloat64m2_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4f64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4f64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m4_m(vbool16_t mask, double *base, ptrdiff_t bstride,
+                           vfloat64m4_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8f64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8f64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m8_m(vbool8_t mask, double *base, ptrdiff_t bstride,
+                           vfloat64m8_t value, size_t vl) {
+  return vsse64(mask, base, bstride, value, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsuxei.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsuxei.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vsuxei.c
@@ -0,0 +1,6523 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8:#.*]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8:#.*]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf8(int8_t *base, vuint8mf8_t bindex, vint8mf8_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf4(int8_t *base, vuint8mf4_t bindex, vint8mf4_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf2(int8_t *base, vuint8mf2_t bindex, vint8mf2_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value,
+                         size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value,
+                         size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value,
+                         size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value,
+                         size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8mf8(int8_t *base, vuint16mf4_t bindex, vint8mf8_t value,
+                           size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i32(
<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8mf4(int8_t *base, vuint16mf2_t bindex, vint8mf4_t value,
+                           size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8mf2(int8_t *base, vuint16m1_t bindex, vint8mf2_t value,
+                           size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value,
+                          size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value,
+                          size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8m4(int8_t *base, vuint16m8_t
bindex, vint8m4_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf8(int8_t *base, vuint32mf2_t bindex, vint8mf8_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf4(int8_t *base, vuint32m1_t bindex, vint8mf4_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf2(int8_t *base, vuint32m2_t bindex, vint8mf2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m2( +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf8(int8_t *base, vuint64m1_t bindex, vint8mf8_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf4(int8_t *base, vuint64m2_t bindex, vint8mf4_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf2(int8_t *base, vuint64m4_t bindex, vint8mf2_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf4(int16_t *base, vuint8mf8_t bindex, vint16mf4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf2(int16_t *base, vuint8mf4_t bindex, vint16mf2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m1(int16_t *base, vuint8mf2_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], 
[[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf4(int16_t *base, vuint16mf4_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf2(int16_t *base, vuint16mf2_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * 
[[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf4(int16_t *base, vuint32mf2_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf2(int16_t 
*base, vuint32m1_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf4(int16_t *base, vuint64m1_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// 
CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf2(int16_t *base, vuint64m2_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32mf2(int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m1(int32_t *base, vuint8mf4_t bindex, vint32m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsuxei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m2(int32_t *base, vuint8mf2_t bindex, vint32m2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32mf2(int32_t *base, vuint16mf4_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m1(int32_t *base, vuint16mf2_t bindex, vint32m1_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m4(int32_t *base, vuint16m2_t bindex, vint32m4_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32mf2(int32_t *base, vuint32mf2_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], 
i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32mf2(int32_t *base, vuint64m1_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m1(int32_t *base, vuint64m2_t bindex, vint32m1_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m1(int64_t *base, vuint8mf8_t bindex, vint64m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], 
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i64m2(int64_t *base, vuint8mf4_t bindex, vint64m2_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i64m4(int64_t *base, vuint8mf2_t bindex, vint64m4_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m1(int64_t *base, vuint16mf4_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m2(int64_t *base, vuint16mf2_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t value,
+                           size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m1(int64_t *base, vuint32mf2_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t value,
+                           size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf8(uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf4(uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf2(uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8>
[[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8mf8(uint8_t *base, vuint16mf4_t bindex, + vuint8mf8_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) 
[[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8mf4(uint8_t *base, vuint16mf2_t bindex, + vuint8mf4_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8mf2(uint8_t *base, vuint16m1_t bindex, vuint8mf2_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value, + size_t vl) { + return 
vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8mf8(uint8_t *base, vuint32mf2_t bindex, + vuint8mf8_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8mf4(uint8_t *base, vuint32m1_t bindex, vuint8mf4_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8mf2(uint8_t *base, vuint32m2_t bindex, vuint8mf2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf8(uint8_t *base, vuint64m1_t bindex, vuint8mf8_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf4(uint8_t *base, vuint64m2_t bindex, vuint8mf4_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf2(uint8_t *base, vuint64m4_t bindex, vuint8mf2_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast 
i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16mf4(uint16_t *base, vuint8mf8_t bindex, + vuint16mf4_t value, size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16mf2(uint16_t *base, vuint8mf4_t bindex, + vuint16mf2_t value, size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m1(uint16_t *base, vuint8mf2_t bindex, vuint16m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 
[[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16mf4(uint16_t *base, vuint16mf4_t bindex, + vuint16mf4_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16mf2(uint16_t *base, vuint16mf2_t bindex, + vuint16mf2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], 
[[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex, + vuint16m8_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16mf4(uint16_t *base, vuint32mf2_t bindex, + vuint16mf4_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16mf2(uint16_t 
*base, vuint32m1_t bindex, + vuint16mf2_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m2(uint16_t *base, vuint32m4_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16mf4(uint16_t *base, vuint64m1_t bindex, + vuint16mf4_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// 
CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16mf2(uint16_t *base, vuint64m2_t bindex, + vuint16mf2_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32mf2(uint32_t *base, vuint8mf8_t bindex, + vuint32mf2_t value, size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m1(uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, 
vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m2(uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32mf2(uint32_t *base, vuint16mf4_t bindex, + vuint32mf2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m1(uint32_t *base, vuint16mf2_t bindex, + vuint32m1_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex, + vuint32m2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex, + vuint32m4_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex, + vuint32m8_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u32mf2(uint32_t *base, vuint32mf2_t bindex, + vuint32mf2_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32mf2(uint32_t *base, vuint64m1_t bindex,
+                            vuint32mf2_t value, size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m1(uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m2(uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m4(uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, vuint64m8_t value,
+                          size_t vl) {
+  return vsuxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m1(uint64_t *base, vuint16mf4_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsuxei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void 
test_vsuxei16_v_u64m2(uint64_t *base, vuint16mf2_t bindex, + vuint64m2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex, + vuint64m4_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u64m8(uint64_t *base, vuint16m2_t bindex, + vuint64m8_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m1(uint64_t *base, vuint32mf2_t bindex, + vuint64m1_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex, + vuint64m2_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: 
ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex, + vuint64m4_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m8(uint64_t *base, vuint32m4_t bindex, + vuint64m8_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex, + vuint64m1_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex, + vuint64m2_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex, + vuint64m4_t value, size_t vl) { + return 
vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m8(uint64_t *base, vuint64m8_t bindex, + vuint64m8_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32mf2(float *base, vuint8mf8_t bindex, vfloat32mf2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m1(float *base, vuint8mf4_t bindex, vfloat32m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m2(float *base, vuint8mf2_t bindex, vfloat32m2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m8(float *base, vuint8m2_t bindex, vfloat32m8_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32mf2(float *base, vuint16mf4_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m1(float *base, vuint16mf2_t bindex, vfloat32m1_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m4( +// CHECK-RV32-NEXT: 
entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32mf2(float *base, vuint32mf2_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsuxei.nxv4f32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32mf2(float *base, vuint64m1_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * 
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t value, + size_t vl) { + return vsuxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m1(double *base, vuint8mf8_t bindex, vfloat64m1_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m2(double *base, vuint8mf4_t bindex, vfloat64m2_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) 
[[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m4(double *base, vuint8mf2_t bindex, vfloat64m4_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value, + size_t vl) { + return vsuxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f64m1(double *base, vuint16mf4_t bindex, + vfloat64m1_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f64m2(double *base, vuint16mf2_t bindex, + vfloat64m2_t value, size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], 
[[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value, + size_t vl) { + return vsuxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f64m1(double *base, vuint32mf2_t bindex, + vfloat64m1_t value, size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t value, + size_t vl) { + return vsuxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsuxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsuxei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1f64.nxv1i64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2f64.nxv2i64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4f64.nxv4i64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8f64.nxv8i64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsuxei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint8mf8_t bindex,
+                            vint8mf8_t value, size_t vl) {
+  return vsuxei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint8mf4_t bindex,
+                            vint8mf4_t value, size_t vl) {
+  return vsuxei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint8mf2_t bindex,
+                            vint8mf2_t value, size_t vl) {
+  return vsuxei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m1_m(vbool8_t mask, int8_t *base, vuint8m1_t bindex,
+                           vint8m1_t value, size_t vl) {
+  return vsuxei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void 
@llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i8m2_m(vbool4_t mask, int8_t *base, vuint8m2_t bindex, + vint8m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i8m4_m(vbool2_t mask, int8_t *base, vuint8m4_t bindex, + vint8m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i8m8_m(vbool1_t mask, int8_t *base, vuint8m8_t bindex, + vint8m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint16mf4_t bindex, + vint8mf8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8mf4_m(vbool32_t mask, int8_t *base, 
vuint16mf2_t bindex, + vint8mf4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint16m1_t bindex, + vint8mf2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8m1_m(vbool8_t mask, int8_t *base, vuint16m2_t bindex, + vint8m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8m2_m(vbool4_t mask, int8_t *base, vuint16m4_t bindex, + vint8m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8m4_m(vbool2_t mask, int8_t *base, vuint16m8_t bindex, + vint8m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint32mf2_t bindex, + vint8mf8_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint32m1_t bindex, + vint8mf4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint32m2_t bindex, + vint8mf2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8m1_m(vbool8_t mask, int8_t *base, vuint32m4_t bindex, + vint8m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// 
CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8m2_m(vbool4_t mask, int8_t *base, vuint32m8_t bindex, + vint8m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint64m1_t bindex, + vint8mf8_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint64m2_t bindex, + vint8mf4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint64m4_t bindex, + vint8mf2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8m1_m(vbool8_t mask, int8_t *base, vuint64m8_t bindex, + vint8m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint8mf8_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint8mf4_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m1_m(vbool16_t mask, int16_t *base, vuint8mf2_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m2_m(vbool8_t mask, 
int16_t *base, vuint8m1_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m4_m(vbool4_t mask, int16_t *base, vuint8m2_t bindex, + vint16m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m8_m(vbool2_t mask, int16_t *base, vuint8m4_t bindex, + vint16m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint16mf4_t bindex, vint16mf4_t value, + size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf2_m(vbool32_t mask, int16_t *base, + vuint16mf2_t bindex, vint16mf2_t value, + size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m1_m( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m1_m(vbool16_t mask, int16_t *base, vuint16m1_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m2_m(vbool8_t mask, int16_t *base, vuint16m2_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m4_m(vbool4_t mask, int16_t *base, vuint16m4_t bindex, + vint16m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m8_m(vbool2_t mask, int16_t *base, vuint16m8_t bindex, + vint16m8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * 
[[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint32mf2_t bindex, vint16mf4_t value, + size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint32m1_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m1_m(vbool16_t mask, int16_t *base, vuint32m2_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m2_m(vbool8_t mask, int16_t *base, vuint32m4_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m4_m(vbool4_t mask, int16_t *base, vuint32m8_t bindex, + vint16m4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint64m1_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint64m2_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m1_m(vbool16_t mask, int16_t *base, vuint64m4_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], 
[[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m2_m(vbool8_t mask, int16_t *base, vuint64m8_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint8mf8_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m1_m(vbool32_t mask, int32_t *base, vuint8mf4_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m2_m(vbool16_t mask, int32_t *base, vuint8mf2_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m4_m(vbool8_t mask, int32_t *base, vuint8m1_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei8(mask, 
base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m8_m(vbool4_t mask, int32_t *base, vuint8m2_t bindex, + vint32m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint16mf4_t bindex, vint32mf2_t value, + size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m1_m(vbool32_t mask, int32_t *base, vuint16mf2_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m2_m(vbool16_t mask, int32_t *base, vuint16m1_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m4_m(vbool8_t mask, int32_t *base, vuint16m2_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m8_m(vbool4_t mask, int32_t *base, vuint16m4_t bindex, + vint32m8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint32mf2_t bindex, vint32mf2_t value, + size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m1_m(vbool32_t mask, int32_t *base, vuint32m1_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret 
void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m2_m(vbool16_t mask, int32_t *base, vuint32m2_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m4_m(vbool8_t mask, int32_t *base, vuint32m4_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m8_m(vbool4_t mask, int32_t *base, vuint32m8_t bindex, + vint32m8_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint64m1_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m1_m(vbool32_t mask, int32_t *base, vuint64m2_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m2_m(vbool16_t mask, int32_t *base, vuint64m4_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m4_m(vbool8_t mask, int32_t *base, vuint64m8_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m1_m(vbool64_t mask, int64_t *base, vuint8mf8_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m2_m(vbool32_t 
mask, int64_t *base, vuint8mf4_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m4_m(vbool16_t mask, int64_t *base, vuint8mf2_t bindex, + vint64m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m8_m(vbool8_t mask, int64_t *base, vuint8m1_t bindex, + vint64m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m1_m(vbool64_t mask, int64_t *base, vuint16mf4_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m2_m(vbool32_t mask, int64_t *base, vuint16mf2_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m4_m( +// CHECK-RV32-NEXT: 
entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m4_m(vbool16_t mask, int64_t *base, vuint16m1_t bindex, + vint64m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m8_m(vbool8_t mask, int64_t *base, vuint16m2_t bindex, + vint64m8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m1_m(vbool64_t mask, int64_t *base, vuint32mf2_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m2_m(vbool32_t mask, int64_t *base, vuint32m1_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], 
[[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m4_m(vbool16_t mask, int64_t *base, vuint32m2_t bindex, + vint64m4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m8_m(vbool8_t mask, int64_t *base, vuint32m4_t bindex, + vint64m8_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i64m1_m(vbool64_t mask, int64_t *base, vuint64m1_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i64m2_m(vbool32_t mask, int64_t *base, vuint64m2_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i64m4_m(vbool16_t mask, int64_t *base, vuint64m4_t bindex, + vint64m4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i64m8_m(vbool8_t mask, int64_t *base, vuint64m8_t bindex, + vint64m8_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint8mf8_t bindex, + vuint8mf8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint8mf4_t bindex, + vuint8mf4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// 
CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint8mf2_t bindex, + vuint8mf2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint8m1_t bindex, + vuint8m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint8m2_t bindex, + vuint8m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint8m4_t bindex, + vuint8m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u8m8_m(vbool1_t mask, uint8_t *base, vuint8m8_t bindex, + vuint8m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vsuxei16_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, + vuint8mf8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, + vuint8mf4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint16m1_t bindex, + vuint8mf2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint16m2_t bindex, + vuint8m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i32( 
[[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint16m4_t bindex, + vuint8m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint16m8_t bindex, + vuint8m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint32mf2_t bindex, + vuint8mf8_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint32m1_t bindex, + vuint8mf4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint32m2_t bindex, + vuint8mf2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint32m4_t bindex, + vuint8m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint32m8_t bindex, + vuint8m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint64m1_t bindex, + vuint8mf8_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 
[[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint64m2_t bindex, + vuint8mf4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint64m4_t bindex, + vuint8mf2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint64m8_t bindex, + vuint8m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16mf4_m(vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, + vuint16mf4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16mf2_m(vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, + vuint16mf2_t value, size_t vl) { + return vsuxei8(mask, base, 
bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint8m1_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint8m2_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint8m4_t bindex, + vuint16m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16mf4_m(vbool64_t mask, uint16_t *base, + vuint16mf4_t bindex, vuint16mf4_t value, + size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16mf2_m(vbool32_t mask, uint16_t *base, + vuint16mf2_t bindex, vuint16mf2_t value, + size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint16m1_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint16m2_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// 
CHECK-RV64-LABEL: @test_vsuxei16_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint16m4_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint16m8_t bindex, + vuint16m8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16mf4_m(vbool64_t mask, uint16_t *base, + vuint32mf2_t bindex, vuint16mf4_t value, + size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16mf2_m(vbool32_t mask, uint16_t *base, + vuint32m1_t bindex, vuint16mf2_t value, + size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call 
void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint32m2_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint32m4_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint32m8_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16mf4_m(vbool64_t mask, uint16_t *base, + vuint64m1_t bindex, vuint16mf4_t value, + size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsuxei64_v_u16mf2_m(vbool32_t mask, uint16_t *base, + vuint64m2_t bindex, vuint16mf2_t value, + size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint64m4_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint64m8_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32mf2_m(vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, + vuint32mf2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, + vuint32m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vsuxei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, + vuint32m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint8m1_t bindex, + vuint32m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint8m2_t bindex, + vuint32m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint16mf4_t bindex, vuint32mf2_t value, + size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m1_m(vbool32_t mask, uint32_t *base, + vuint16mf2_t bindex, vuint32m1_t value, + size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint16m1_t bindex, + vuint32m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint16m2_t bindex, + vuint32m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint16m4_t bindex, + vuint32m8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// 
CHECK-RV64-LABEL: @test_vsuxei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint32mf2_t bindex, vuint32mf2_t value, + size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint32m1_t bindex, + vuint32m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint32m2_t bindex, + vuint32m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint32m4_t bindex, + vuint32m4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint32m8_t bindex, + vuint32m8_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint64m1_t bindex, vuint32mf2_t value, + size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint64m2_t bindex, + vuint32m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint64m4_t bindex, + vuint32m2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsuxei64_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint64m8_t bindex, + vuint32m4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, + vuint64m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, + vuint64m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, + vuint64m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint8m1_t bindex, + vuint64m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vsuxei16_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m1_m(vbool64_t mask, uint64_t *base,
+                             vuint16mf4_t bindex, vuint64m1_t value,
+                             size_t vl) {
+  return vsuxei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m2_m(vbool32_t mask, uint64_t *base,
+                             vuint16mf2_t bindex, vuint64m2_t value,
+                             size_t vl) {
+  return vsuxei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint16m1_t bindex,
+                             vuint64m4_t value, size_t vl) {
+  return vsuxei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint16m2_t bindex,
+                             vuint64m8_t value, size_t vl) {
+  return vsuxei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void
@llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint32mf2_t bindex, vuint64m1_t value, + size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint32m1_t bindex, + vuint64m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint32m2_t bindex, + vuint64m4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint32m4_t bindex, + vuint64m8_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// 
CHECK-RV64-LABEL: @test_vsuxei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint64m1_t bindex, + vuint64m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint64m2_t bindex, + vuint64m2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint64m4_t bindex, + vuint64m4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint64m8_t bindex, + vuint64m8_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv1f32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32mf2_m(vbool64_t mask, float *base, vuint8mf8_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m1_m(vbool32_t mask, float *base, vuint8mf4_t bindex, + vfloat32m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m2_m(vbool16_t mask, float *base, vuint8mf2_t bindex, + vfloat32m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m4_m(vbool8_t mask, float *base, vuint8m1_t bindex, + vfloat32m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsuxei8_v_f32m8_m(vbool4_t mask, float *base, vuint8m2_t bindex, + vfloat32m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32mf2_m(vbool64_t mask, float *base, vuint16mf4_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m1_m(vbool32_t mask, float *base, vuint16mf2_t bindex, + vfloat32m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m2_m(vbool16_t mask, float *base, vuint16m1_t bindex, + vfloat32m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m4_m(vbool8_t mask, float *base, vuint16m2_t bindex, + vfloat32m4_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsuxei16_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32m8_m(vbool4_t mask, float *base, vuint16m4_t bindex, + vfloat32m8_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32mf2_m(vbool64_t mask, float *base, vuint32mf2_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m1_m(vbool32_t mask, float *base, vuint32m1_t bindex, + vfloat32m1_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m2_m(vbool16_t mask, float *base, vuint32m2_t bindex, + vfloat32m2_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call 
void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m4_m(vbool8_t mask, float *base, vuint32m4_t bindex, + vfloat32m4_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_f32m8_m(vbool4_t mask, float *base, vuint32m8_t bindex, + vfloat32m8_t value, size_t vl) { + return vsuxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32mf2_m(vbool64_t mask, float *base, vuint64m1_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32m1_m(vbool32_t mask, float *base, vuint64m2_t bindex, + vfloat32m1_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// 
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32m2_m(vbool16_t mask, float *base, vuint64m4_t bindex, + vfloat32m2_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_f32m4_m(vbool8_t mask, float *base, vuint64m8_t bindex, + vfloat32m4_t value, size_t vl) { + return vsuxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m1_m(vbool64_t mask, double *base, vuint8mf8_t bindex, + vfloat64m1_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m2_m(vbool32_t mask, double *base, vuint8mf4_t bindex, + vfloat64m2_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv4f64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m4_m(vbool16_t mask, double *base, vuint8mf2_t bindex, + vfloat64m4_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f64m8_m(vbool8_t mask, double *base, vuint8m1_t bindex, + vfloat64m8_t value, size_t vl) { + return vsuxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f64m1_m(vbool64_t mask, double *base, vuint16mf4_t bindex, + vfloat64m1_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f64m2_m(vbool32_t mask, double *base, vuint16mf2_t bindex, + vfloat64m2_t value, size_t vl) { + return vsuxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]] +// CHECK-RV64-NEXT: ret void +// +void 
test_vsuxei16_v_f64m4_m(vbool16_t mask, double *base, vuint16m1_t bindex,
+                             vfloat64m4_t value, size_t vl) {
+  return vsuxei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i16.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f64m8_m(vbool8_t mask, double *base, vuint16m2_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsuxei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i32.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m1_m(vbool64_t mask, double *base, vuint32mf2_t bindex,
+                             vfloat64m1_t value, size_t vl) {
+  return vsuxei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i32.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m2_m(vbool32_t mask, double *base, vuint32m1_t bindex,
+                             vfloat64m2_t value, size_t vl) {
+  return vsuxei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i32.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m4_m(vbool16_t mask, double *base, vuint32m2_t bindex,
+                             vfloat64m4_t value, size_t vl) {
+  return vsuxei32(mask, base, bindex, value, vl);
+}
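+
+// NB: A minimal usage sketch, not part of the autogenerated checks. In the
+// masked form exercised above, the overloaded vsuxei32() call scatters only
+// the active elements of `value` to `base` plus the per-element byte offsets
+// held in `bindex`; masked-off elements leave memory untouched. The helper
+// name and argument names below are illustrative only:
+//
+//   void scatter_active_f64m4(double *dst, vuint32m2_t byte_offsets,
+//                             vfloat64m4_t value, vbool16_t mask, size_t vl) {
+//     vsuxei32(mask, dst, byte_offsets, value, vl); // unordered scatter
+//   }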
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i32.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m8_m(vbool8_t mask, double *base, vuint32m4_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsuxei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m1_m(vbool64_t mask, double *base, vuint64m1_t bindex,
+                             vfloat64m1_t value, size_t vl) {
+  return vsuxei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m2_m(vbool32_t mask, double *base, vuint64m2_t bindex,
+                             vfloat64m2_t value, size_t vl) {
+  return vsuxei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m4_m(vbool16_t mask, double *base, vuint64m4_t bindex,
+                             vfloat64m4_t value, size_t vl) {
+  return vsuxei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR8]]
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m8_m(vbool8_t mask, double *base, vuint64m8_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsuxei64(mask, base, bindex, value, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vle.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vle.c
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics/vle.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vle.c
@@ -1705,3 +1705,115 @@ vfloat64m8_t test_vle64_v_f64m8_m(vbool8_t mask, vfloat64m8_t maskedoff,
                                   const double *base, size_t vl) {
   return vle64_v_f64m8_m(mask, maskedoff, base, vl);
 }
+
+// CHECK-RV32-LABEL: @test_vle1_v_b1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i1>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.riscv.vle1.nxv64i1.i32(<vscale x 64 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 64 x i1> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vle1_v_b1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i1>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 64 x i1> @llvm.riscv.vle1.nxv64i1.i64(<vscale x 64 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i1> [[TMP1]]
+//
+vbool1_t test_vle1_v_b1(const uint8_t *base, size_t vl) {
+  return vle1_v_b1(base, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vle1_v_b2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i1>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.riscv.vle1.nxv32i1.i32(<vscale x 32 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vle1_v_b2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i1>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 32 x i1> @llvm.riscv.vle1.nxv32i1.i64(<vscale x 32 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i1> [[TMP1]]
+//
+vbool2_t test_vle1_v_b2(const uint8_t *base, size_t vl) {
+  return vle1_v_b2(base, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vle1_v_b4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i1>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.riscv.vle1.nxv16i1.i32(<vscale x 16 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vle1_v_b4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i1>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i1> @llvm.riscv.vle1.nxv16i1.i64(<vscale x 16 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i1> [[TMP1]]
+//
+vbool4_t test_vle1_v_b4(const uint8_t *base, size_t vl) {
+  return vle1_v_b4(base, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vle1_v_b8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i1>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.riscv.vle1.nxv8i1.i32(<vscale x 8 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vle1_v_b8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i1>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.riscv.vle1.nxv8i1.i64(<vscale x 8 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i1> [[TMP1]]
+//
+vbool8_t test_vle1_v_b8(const uint8_t *base, size_t vl) {
+  return vle1_v_b8(base, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vle1_v_b16(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i1>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.riscv.vle1.nxv4i1.i32(<vscale x 4 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vle1_v_b16(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i1>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i1> @llvm.riscv.vle1.nxv4i1.i64(<vscale x 4 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i1> [[TMP1]]
+//
+vbool16_t test_vle1_v_b16(const uint8_t *base, size_t vl) {
+  return vle1_v_b16(base, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vle1_v_b32(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i1>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.riscv.vle1.nxv2i1.i32(<vscale x 2 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vle1_v_b32(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i1>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i1> @llvm.riscv.vle1.nxv2i1.i64(<vscale x 2 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i1> [[TMP1]]
+//
+vbool32_t test_vle1_v_b32(const uint8_t *base, size_t vl) {
+  return vle1_v_b32(base, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vle1_v_b64(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i1>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.riscv.vle1.nxv1i1.i32(<vscale x 1 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vle1_v_b64(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i1>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i1> @llvm.riscv.vle1.nxv1i1.i64(<vscale x 1 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i1> [[TMP1]]
+//
+vbool64_t test_vle1_v_b64(const uint8_t *base, size_t vl) {
+  return vle1_v_b64(base, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vlse.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vlse.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vlse.c
@@ -0,0 +1,1862 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vlse8_v_i8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vlse.nxv1i8.i32(<vscale x 1 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i8> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vlse8_v_i8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vlse.nxv1i8.i64(<vscale x 1 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+//
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf8_t test_vlse8_v_i8mf8(const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8mf8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf4_t test_vlse8_v_i8mf4(const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8mf4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf2_t test_vlse8_v_i8mf2(const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8mf2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m1_t test_vlse8_v_i8m1(const int8_t *base, ptrdiff_t bstride, size_t vl) { + return vlse8_v_i8m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m2_t test_vlse8_v_i8m2(const int8_t *base, ptrdiff_t bstride, size_t vl) { + return vlse8_v_i8m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i8.i64(* [[TMP0]], i64 
[[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m4_t test_vlse8_v_i8m4(const int8_t *base, ptrdiff_t bstride, size_t vl) { + return vlse8_v_i8m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv64i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv64i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m8_t test_vlse8_v_i8m8(const int8_t *base, ptrdiff_t bstride, size_t vl) { + return vlse8_v_i8m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16mf4_t test_vlse16_v_i16mf4(const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16mf4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16mf2_t test_vlse16_v_i16mf2(const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16mf2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m1_t test_vlse16_v_i16m1(const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m2_t test_vlse16_v_i16m2(const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m4_t test_vlse16_v_i16m4(const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m8_t test_vlse16_v_i16m8(const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vlse32_v_i32mf2(const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32mf2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vlse32_v_i32m1(const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m2( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vlse32_v_i32m2(const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vlse32_v_i32m4(const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vlse32_v_i32m8(const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vlse64_v_i64m1(const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vlse64_v_i64m2(const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 
[[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vlse64_v_i64m4(const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vlse64_v_i64m8(const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf8_t test_vlse8_v_u8mf8(const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8mf8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf4_t test_vlse8_v_u8mf4(const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8mf4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf2_t test_vlse8_v_u8mf2(const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8mf2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vlse.nxv8i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m1_t test_vlse8_v_u8m1(const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m2_t test_vlse8_v_u8m2(const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m4_t test_vlse8_v_u8m4(const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv64i8.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv64i8.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m8_t test_vlse8_v_u8m8(const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16mf4_t test_vlse16_v_u16mf4(const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16mf4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16mf2_t test_vlse16_v_u16mf2(const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16mf2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m1_t test_vlse16_v_u16m1(const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m2_t test_vlse16_v_u16m2(const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m4_t test_vlse16_v_u16m4(const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i16.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv32i16.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m8_t test_vlse16_v_u16m8(const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m8(base, bstride, vl); +} + +// 
CHECK-RV32-LABEL: @test_vlse32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vlse32_v_u32mf2(const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32mf2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vlse32_v_u32m1(const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vlse32_v_u32m2(const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vlse32_v_u32m4(const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16i32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vlse32_v_u32m8(const 
uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vlse64_v_u64m1(const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vlse64_v_u64m2(const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vlse64_v_u64m4(const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8i64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vlse64_v_u64m8(const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1f32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1f32.i64(* [[TMP0]], i64 
[[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vlse32_v_f32mf2(const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32mf2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2f32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2f32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vlse32_v_f32m1(const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4f32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4f32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vlse32_v_f32m2(const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8f32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8f32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vlse32_v_f32m4(const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16f32.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv16f32.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vlse32_v_f32m8(const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1f64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast 
double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv1f64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vlse64_v_f64m1(const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_f64m1(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2f64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv2f64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vlse64_v_f64m2(const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_f64m2(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4f64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv4f64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vlse64_v_f64m4(const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_f64m4(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8f64.i32(* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.nxv8f64.i64(* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vlse64_v_f64m8(const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_f64m8(base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf8_t test_vlse8_v_i8mf8_m(vbool64_t mask, vint8mf8_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8mf8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: 
[[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf4_t test_vlse8_v_i8mf4_m(vbool32_t mask, vint8mf4_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8mf4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8mf2_t test_vlse8_v_i8mf2_m(vbool16_t mask, vint8mf2_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8mf2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m1_t test_vlse8_v_i8m1_m(vbool8_t mask, vint8m1_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m2_t test_vlse8_v_i8m2_m(vbool4_t mask, vint8m2_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], 
[[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m4_t test_vlse8_v_i8m4_m(vbool2_t mask, vint8m4_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_i8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_i8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint8m8_t test_vlse8_v_i8m8_m(vbool1_t mask, vint8m8_t maskedoff, + const int8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_i8m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16mf4_t test_vlse16_v_i16mf4_m(vbool64_t mask, vint16mf4_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16mf4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16mf2_t test_vlse16_v_i16mf2_m(vbool32_t mask, vint16mf2_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16mf2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vlse16_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m1_t test_vlse16_v_i16m1_m(vbool16_t mask, vint16m1_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m2_t test_vlse16_v_i16m2_m(vbool8_t mask, vint16m2_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m4_t test_vlse16_v_i16m4_m(vbool4_t mask, vint16m4_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint16m8_t test_vlse16_v_i16m8_m(vbool2_t mask, vint16m8_t maskedoff, + const int16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_i16m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32mf2_m( +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vlse32_v_i32mf2_m(vbool64_t mask, vint32mf2_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32mf2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vlse32_v_i32m1_m(vbool32_t mask, vint32m1_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vlse32_v_i32m2_m(vbool16_t mask, vint32m2_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vlse32_v_i32m4_m(vbool8_t mask, vint32m4_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vlse32_v_i32m8_m(vbool4_t mask, vint32m8_t maskedoff, + const int32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_i32m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vlse64_v_i64m1_m(vbool64_t mask, vint64m1_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vlse64_v_i64m2_m(vbool32_t mask, vint64m2_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vlse64_v_i64m4_m(vbool16_t mask, vint64m4_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vlse.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vlse64_v_i64m8_m(vbool8_t mask, vint64m8_t maskedoff, + const int64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_i64m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf8_t test_vlse8_v_u8mf8_m(vbool64_t mask, vuint8mf8_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8mf8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf4_t test_vlse8_v_u8mf4_m(vbool32_t mask, vuint8mf4_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8mf4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8mf2_t test_vlse8_v_u8mf2_m(vbool16_t mask, vuint8mf2_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8mf2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], 
[[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m1_t test_vlse8_v_u8m1_m(vbool8_t mask, vuint8m1_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m2_t test_vlse8_v_u8m2_m(vbool4_t mask, vuint8m2_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m4_t test_vlse8_v_u8m4_m(vbool2_t mask, vuint8m4_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse8_v_u8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse8_v_u8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint8m8_t test_vlse8_v_u8m8_m(vbool1_t mask, vuint8m8_t maskedoff, + const uint8_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse8_v_u8m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16mf4_t 
test_vlse16_v_u16mf4_m(vbool64_t mask, vuint16mf4_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16mf4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16mf2_t test_vlse16_v_u16mf2_m(vbool32_t mask, vuint16mf2_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16mf2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m1_t test_vlse16_v_u16m1_m(vbool16_t mask, vuint16m1_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m2_t test_vlse16_v_u16m2_m(vbool8_t mask, vuint16m2_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m4_t test_vlse16_v_u16m4_m(vbool4_t mask, vuint16m4_t 
maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse16_v_u16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse16_v_u16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint16m8_t test_vlse16_v_u16m8_m(vbool2_t mask, vuint16m8_t maskedoff, + const uint16_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse16_v_u16m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vlse32_v_u32mf2_m(vbool64_t mask, vuint32mf2_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32mf2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vlse32_v_u32m1_m(vbool32_t mask, vuint32m1_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vlse32_v_u32m2_m(vbool16_t mask, vuint32m2_t maskedoff, + const uint32_t *base, ptrdiff_t 
bstride, + size_t vl) { + return vlse32_v_u32m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vlse32_v_u32m4_m(vbool8_t mask, vuint32m4_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vlse32_v_u32m8_m(vbool4_t mask, vuint32m8_t maskedoff, + const uint32_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_u32m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vlse64_v_u64m1_m(vbool64_t mask, vuint64m1_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vlse64_v_u64m2_m(vbool32_t mask, vuint64m2_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m2_m(mask, 
maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vlse64_v_u64m4_m(vbool16_t mask, vuint64m4_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vlse64_v_u64m8_m(vbool8_t mask, vuint64m8_t maskedoff, + const uint64_t *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_u64m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vlse32_v_f32mf2_m(vbool64_t mask, vfloat32mf2_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32mf2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv2f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vlse32_v_f32m1_m(vbool32_t mask, vfloat32m1_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m1_m(mask, maskedoff, base, bstride, vl); +} + +// 
CHECK-RV32-LABEL: @test_vlse32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv4f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vlse32_v_f32m2_m(vbool16_t mask, vfloat32m2_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m2_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv8f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vlse32_v_f32m4_m(vbool8_t mask, vfloat32m4_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m4_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16f32.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv16f32.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vlse32_v_f32m8_m(vbool4_t mask, vfloat32m8_t maskedoff, + const float *base, ptrdiff_t bstride, + size_t vl) { + return vlse32_v_f32m8_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f64.i32( [[MASKEDOFF:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vlse64_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vlse.mask.nxv1f64.i64( [[MASKEDOFF:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vlse64_v_f64m1_m(vbool64_t mask, vfloat64m1_t maskedoff, + const double *base, ptrdiff_t bstride, + size_t vl) { + return vlse64_v_f64m1_m(mask, maskedoff, base, bstride, vl); +} + +// CHECK-RV32-LABEL: @test_vlse64_v_f64m2_m( 
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vlse.mask.nxv2f64.i32(<vscale x 2 x double> [[MASKEDOFF:%.*]], <vscale x 2 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vlse64_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vlse.mask.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF:%.*]], <vscale x 2 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+vfloat64m2_t test_vlse64_v_f64m2_m(vbool32_t mask, vfloat64m2_t maskedoff,
+                                   const double *base, ptrdiff_t bstride,
+                                   size_t vl) {
+  return vlse64_v_f64m2_m(mask, maskedoff, base, bstride, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vlse64_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vlse.mask.nxv4f64.i32(<vscale x 4 x double> [[MASKEDOFF:%.*]], <vscale x 4 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vlse64_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vlse.mask.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF:%.*]], <vscale x 4 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+vfloat64m4_t test_vlse64_v_f64m4_m(vbool16_t mask, vfloat64m4_t maskedoff,
+                                   const double *base, ptrdiff_t bstride,
+                                   size_t vl) {
+  return vlse64_v_f64m4_m(mask, maskedoff, base, bstride, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vlse64_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vlse.mask.nxv8f64.i32(<vscale x 8 x double> [[MASKEDOFF:%.*]], <vscale x 8 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vlse64_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vlse.mask.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF:%.*]], <vscale x 8 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+vfloat64m8_t test_vlse64_v_f64m8_m(vbool8_t mask, vfloat64m8_t maskedoff,
+                                   const double *base, ptrdiff_t bstride,
+                                   size_t vl) {
+  return vlse64_v_f64m8_m(mask, maskedoff, base, bstride, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vse.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vse.c
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics/vse.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vse.c
@@ -1705,3 +1705,115 @@
 void test_vse64_v_f64m8_m(vbool8_t mask, double *base, vfloat64m8_t value,
                           size_t vl) {
   return vse64_v_f64m8_m(mask, base, value, vl);
 }
+
+// CHECK-RV32-LABEL: @test_vse1_v_b1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i1>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vse1.nxv64i1.i32(<vscale x 64 x i1> [[VALUE:%.*]], <vscale x 64 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vse1_v_b1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i1>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vse1.nxv64i1.i64(<vscale x 64 x i1> [[VALUE:%.*]], <vscale x 64 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vse1_v_b1(uint8_t *base, vbool1_t value, size_t vl) {
+  return vse1_v_b1(base, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vse1_v_b2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i1>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vse1.nxv32i1.i32(<vscale x 32 x i1> [[VALUE:%.*]], <vscale x 32 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vse1_v_b2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i1>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vse1.nxv32i1.i64(<vscale x 32 x i1> [[VALUE:%.*]], <vscale x 32 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vse1_v_b2(uint8_t *base, vbool2_t value, size_t vl) {
+  return vse1_v_b2(base, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vse1_v_b4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i1>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vse1.nxv16i1.i32(<vscale x 16 x i1> [[VALUE:%.*]], <vscale x 16 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vse1_v_b4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i1>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vse1.nxv16i1.i64(<vscale x 16 x i1> [[VALUE:%.*]], <vscale x 16 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vse1_v_b4(uint8_t *base, vbool4_t value, size_t vl) {
+  return vse1_v_b4(base, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vse1_v_b8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i1>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vse1.nxv8i1.i32(<vscale x 8 x i1> [[VALUE:%.*]], <vscale x 8 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vse1_v_b8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i1>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vse1.nxv8i1.i64(<vscale x 8 x i1> [[VALUE:%.*]], <vscale x 8 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vse1_v_b8(uint8_t *base, vbool8_t value, size_t vl) {
+  return vse1_v_b8(base, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vse1_v_b16(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i1>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vse1.nxv4i1.i32(<vscale x 4 x i1> [[VALUE:%.*]], <vscale x 4 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vse1_v_b16(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i1>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vse1.nxv4i1.i64(<vscale x 4 x i1> [[VALUE:%.*]], <vscale x 4 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vse1_v_b16(uint8_t *base, vbool16_t value, size_t vl) {
+  return vse1_v_b16(base, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vse1_v_b32(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i1>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vse1.nxv2i1.i32(<vscale x 2 x i1> [[VALUE:%.*]], <vscale x 2 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vse1_v_b32(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i1>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vse1.nxv2i1.i64(<vscale x 2 x i1> [[VALUE:%.*]], <vscale x 2 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vse1_v_b32(uint8_t *base, vbool32_t value, size_t vl) {
+  return vse1_v_b32(base, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vse1_v_b64(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i1>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vse1.nxv1i1.i32(<vscale x 1 x i1> [[VALUE:%.*]], <vscale x 1 x i1>* [[TMP0]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vse1_v_b64(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i1>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vse1.nxv1i1.i64(<vscale x 1 x i1> [[VALUE:%.*]], <vscale x 1 x i1>* [[TMP0]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vse1_v_b64(uint8_t *base, vbool64_t value, size_t vl) {
+  return vse1_v_b64(base, value, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vsoxei.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vsoxei.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vsoxei.c
@@ -0,0 +1,6523 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i8mf8(int8_t *base, vuint8mf8_t bindex, vint8mf8_t value,
+                          size_t vl) {
+  return vsoxei8_v_i8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i8mf4(int8_t *base, vuint8mf4_t bindex, vint8mf4_t value,
+                          size_t vl) {
+  return vsoxei8_v_i8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void
test_vsoxei8_v_i8mf2(int8_t *base, vuint8mf2_t bindex, vint8mf2_t value, + size_t vl) { + return vsoxei8_v_i8mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value, + size_t vl) { + return vsoxei8_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value, + size_t vl) { + return vsoxei8_v_i8m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value, + size_t vl) { + return vsoxei8_v_i8m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value, + size_t vl) { + return vsoxei8_v_i8m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i8mf8(int8_t *base, vuint16mf4_t bindex, vint8mf8_t value,
+                           size_t vl) {
+  return vsoxei16_v_i8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i8mf4(int8_t *base, vuint16mf2_t bindex, vint8mf4_t value,
+                           size_t vl) {
+  return vsoxei16_v_i8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i8mf2(int8_t *base, vuint16m1_t bindex, vint8mf2_t value,
+                           size_t vl) {
+  return vsoxei16_v_i8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value,
+                          size_t vl) {
+  return vsoxei16_v_i8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value,
+                          size_t vl) {
+  return vsoxei16_v_i8m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void
@llvm.riscv.vsoxei.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m4(int8_t *base, vuint16m8_t bindex, vint8m4_t value, + size_t vl) { + return vsoxei16_v_i8m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf8(int8_t *base, vuint32mf2_t bindex, vint8mf8_t value, + size_t vl) { + return vsoxei32_v_i8mf8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf4(int8_t *base, vuint32m1_t bindex, vint8mf4_t value, + size_t vl) { + return vsoxei32_v_i8mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf2(int8_t *base, vuint32m2_t bindex, vint8mf2_t value, + size_t vl) { + return vsoxei32_v_i8mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value, + size_t 
vl) { + return vsoxei32_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value, + size_t vl) { + return vsoxei32_v_i8m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8mf8(int8_t *base, vuint64m1_t bindex, vint8mf8_t value, + size_t vl) { + return vsoxei64_v_i8mf8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8mf4(int8_t *base, vuint64m2_t bindex, vint8mf4_t value, + size_t vl) { + return vsoxei64_v_i8mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8mf2(int8_t *base, vuint64m4_t bindex, vint8mf2_t value, + size_t vl) { + return vsoxei64_v_i8mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: 
call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value, + size_t vl) { + return vsoxei64_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16mf4(int16_t *base, vuint8mf8_t bindex, vint16mf4_t value, + size_t vl) { + return vsoxei8_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16mf2(int16_t *base, vuint8mf4_t bindex, vint16mf2_t value, + size_t vl) { + return vsoxei8_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m1(int16_t *base, vuint8mf2_t bindex, vint16m1_t value, + size_t vl) { + return vsoxei8_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value, + size_t vl) { + return vsoxei8_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], 
* [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value, + size_t vl) { + return vsoxei8_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value, + size_t vl) { + return vsoxei8_v_i16m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16mf4(int16_t *base, vuint16mf4_t bindex, + vint16mf4_t value, size_t vl) { + return vsoxei16_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16mf2(int16_t *base, vuint16mf2_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei16_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t value, + size_t vl) { + return 
vsoxei16_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t value, + size_t vl) { + return vsoxei16_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t value, + size_t vl) { + return vsoxei16_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t value, + size_t vl) { + return vsoxei16_v_i16m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16mf4(int16_t *base, vuint32mf2_t bindex, + vint16mf4_t value, size_t vl) { + return vsoxei32_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16mf2(int16_t *base, vuint32m1_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei32_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t value, + size_t vl) { + return vsoxei32_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t value, + size_t vl) { + return vsoxei32_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t value, + size_t vl) { + return vsoxei32_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16mf4(int16_t *base, vuint64m1_t bindex, + vint16mf4_t value, size_t vl) { + return vsoxei64_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16mf2(int16_t *base, vuint64m2_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei64_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t value, + size_t vl) { + return vsoxei64_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t value, + size_t vl) { + return vsoxei64_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32mf2(int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, + size_t vl) { + return vsoxei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m1(int32_t 
*base, vuint8mf4_t bindex, vint32m1_t value, + size_t vl) { + return vsoxei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m2(int32_t *base, vuint8mf2_t bindex, vint32m2_t value, + size_t vl) { + return vsoxei8_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value, + size_t vl) { + return vsoxei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value, + size_t vl) { + return vsoxei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32mf2(int32_t *base, vuint16mf4_t bindex, + vint32mf2_t value, size_t vl) { + return vsoxei16_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32m1(int32_t *base, vuint16mf2_t bindex, vint32m1_t value, + size_t vl) { + return vsoxei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t value, + size_t vl) { + return vsoxei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32m4(int32_t *base, vuint16m2_t bindex, vint32m4_t value, + size_t vl) { + return vsoxei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t value, + size_t vl) { + return vsoxei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32mf2(int32_t *base, vuint32mf2_t bindex, + vint32mf2_t value, size_t vl) { + return vsoxei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t value, + size_t vl) { + return vsoxei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t value, + size_t vl) { + return vsoxei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t value, + size_t vl) { + return vsoxei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t value, + size_t vl) { + return vsoxei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void 
+// +void test_vsoxei64_v_i32mf2(int32_t *base, vuint64m1_t bindex, + vint32mf2_t value, size_t vl) { + return vsoxei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i32m1(int32_t *base, vuint64m2_t bindex, vint32m1_t value, + size_t vl) { + return vsoxei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t value, + size_t vl) { + return vsoxei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t value, + size_t vl) { + return vsoxei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i64m1(int64_t *base, vuint8mf8_t bindex, vint64m1_t value, + size_t vl) { + return vsoxei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2( 
+// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i64m2(int64_t *base, vuint8mf4_t bindex, vint64m2_t value, + size_t vl) { + return vsoxei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i64m4(int64_t *base, vuint8mf2_t bindex, vint64m4_t value, + size_t vl) { + return vsoxei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value, + size_t vl) { + return vsoxei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i64m1(int64_t *base, vuint16mf4_t bindex, vint64m1_t value, + size_t vl) { + return vsoxei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i64m2(int64_t *base, vuint16mf2_t bindex, vint64m2_t value, + size_t vl) { + return vsoxei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t value, + size_t vl) { + return vsoxei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t value, + size_t vl) { + return vsoxei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i64m1(int64_t *base, vuint32mf2_t bindex, vint64m1_t value, + size_t vl) { + return vsoxei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t value, + size_t vl) { + return vsoxei32_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: 
ret void +// +void test_vsoxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t value, + size_t vl) { + return vsoxei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t value, + size_t vl) { + return vsoxei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t value, + size_t vl) { + return vsoxei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t value, + size_t vl) { + return vsoxei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t value, + size_t vl) { + return vsoxei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: 
@test_vsoxei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t value, + size_t vl) { + return vsoxei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8mf8(uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t value, + size_t vl) { + return vsoxei8_v_u8mf8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8mf4(uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t value, + size_t vl) { + return vsoxei8_v_u8mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8mf2(uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t value, + size_t vl) { + return vsoxei8_v_u8mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value, + size_t vl) { + return vsoxei8_v_u8m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value, + size_t vl) { + return vsoxei8_v_u8m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value, + size_t vl) { + return vsoxei8_v_u8m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value, + size_t vl) { + return vsoxei8_v_u8m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8mf8(uint8_t *base, vuint16mf4_t bindex, + vuint8mf8_t value, size_t vl) { + return vsoxei16_v_u8mf8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u8mf4(uint8_t 
+void test_vsoxei16_v_u8mf4(uint8_t *base, vuint16mf2_t bindex,
+                           vuint8mf4_t value, size_t vl) {
+  return vsoxei16_v_u8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u8mf2(uint8_t *base, vuint16m1_t bindex, vuint8mf2_t value,
+                           size_t vl) {
+  return vsoxei16_v_u8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value,
+                          size_t vl) {
+  return vsoxei16_v_u8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value,
+                          size_t vl) {
+  return vsoxei16_v_u8m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value,
+                          size_t vl) {
+  return vsoxei16_v_u8m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u8mf8(uint8_t *base, vuint32mf2_t bindex,
+                           vuint8mf8_t value, size_t vl) {
+  return vsoxei32_v_u8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u8mf4(uint8_t *base, vuint32m1_t bindex, vuint8mf4_t value,
+                           size_t vl) {
+  return vsoxei32_v_u8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u8mf2(uint8_t *base, vuint32m2_t bindex, vuint8mf2_t value,
+                           size_t vl) {
+  return vsoxei32_v_u8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value,
+                          size_t vl) {
+  return vsoxei32_v_u8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value,
+                          size_t vl) {
+  return vsoxei32_v_u8m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u8mf8(uint8_t *base, vuint64m1_t bindex, vuint8mf8_t value,
+                           size_t vl) {
+  return vsoxei64_v_u8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u8mf4(uint8_t *base, vuint64m2_t bindex, vuint8mf4_t value,
+                           size_t vl) {
+  return vsoxei64_v_u8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u8mf2(uint8_t *base, vuint64m4_t bindex, vuint8mf2_t value,
+                           size_t vl) {
+  return vsoxei64_v_u8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value,
+                          size_t vl) {
+  return vsoxei64_v_u8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u16mf4(uint16_t *base, vuint8mf8_t bindex,
+                           vuint16mf4_t value, size_t vl) {
+  return vsoxei8_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u16mf2(uint16_t *base, vuint8mf4_t bindex,
+                           vuint16mf2_t value, size_t vl) {
+  return vsoxei8_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u16m1(uint16_t *base, vuint8mf2_t bindex, vuint16m1_t value,
+                          size_t vl) {
+  return vsoxei8_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t value,
+                          size_t vl) {
+  return vsoxei8_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t value,
+                          size_t vl) {
+  return vsoxei8_v_u16m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t value,
+                          size_t vl) {
+  return vsoxei8_v_u16m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16mf4(uint16_t *base, vuint16mf4_t bindex,
+                            vuint16mf4_t value, size_t vl) {
+  return vsoxei16_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16mf2(uint16_t *base, vuint16mf2_t bindex,
+                            vuint16mf2_t value, size_t vl) {
+  return vsoxei16_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsoxei16_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsoxei16_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex,
+                           vuint16m4_t value, size_t vl) {
+  return vsoxei16_v_u16m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex,
+                           vuint16m8_t value, size_t vl) {
+  return vsoxei16_v_u16m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16mf4(uint16_t *base, vuint32mf2_t bindex,
+                            vuint16mf4_t value, size_t vl) {
+  return vsoxei32_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16mf2(uint16_t *base, vuint32m1_t bindex,
+                            vuint16mf2_t value, size_t vl) {
+  return vsoxei32_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsoxei32_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16m2(uint16_t *base, vuint32m4_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsoxei32_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex,
+                           vuint16m4_t value, size_t vl) {
+  return vsoxei32_v_u16m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16mf4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16mf4(uint16_t *base, vuint64m1_t bindex,
+                            vuint16mf4_t value, size_t vl) {
+  return vsoxei64_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16mf2(uint16_t *base, vuint64m2_t bindex,
+                            vuint16mf2_t value, size_t vl) {
+  return vsoxei64_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsoxei64_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsoxei64_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32mf2(uint32_t *base, vuint8mf8_t bindex,
+                           vuint32mf2_t value, size_t vl) {
+  return vsoxei8_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m1(uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value,
+                          size_t vl) {
+  return vsoxei8_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m2(uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value,
+                          size_t vl) {
+  return vsoxei8_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t value,
+                          size_t vl) {
+  return vsoxei8_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t value,
+                          size_t vl) {
+  return vsoxei8_v_u32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32mf2(uint32_t *base, vuint16mf4_t bindex,
+                            vuint32mf2_t value, size_t vl) {
+  return vsoxei16_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m1(uint32_t *base, vuint16mf2_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsoxei16_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsoxei16_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsoxei16_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsoxei16_v_u32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32mf2(uint32_t *base, vuint32mf2_t bindex,
+                            vuint32mf2_t value, size_t vl) {
+  return vsoxei32_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsoxei32_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsoxei32_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsoxei32_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsoxei32_v_u32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32mf2(uint32_t *base, vuint64m1_t bindex,
+                            vuint32mf2_t value, size_t vl) {
+  return vsoxei64_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsoxei64_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsoxei64_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsoxei64_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m1(uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value,
+                          size_t vl) {
+  return vsoxei8_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m2(uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value,
+                          size_t vl) {
+  return vsoxei8_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m4(uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value,
+                          size_t vl) {
+  return vsoxei8_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, vuint64m8_t value,
+                          size_t vl) {
+  return vsoxei8_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m1(uint64_t *base, vuint16mf4_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsoxei16_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m2(uint64_t *base, vuint16mf2_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsoxei16_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsoxei16_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_u64m8(uint64_t *base, vuint16m2_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsoxei16_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m1(uint64_t *base, vuint32mf2_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsoxei32_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsoxei32_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsoxei32_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei32_v_u64m8(uint64_t *base, vuint32m4_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsoxei32_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsoxei64_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsoxei64_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsoxei64_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei64_v_u64m8(uint64_t *base, vuint64m8_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsoxei64_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i8.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32mf2(float *base, vuint8mf8_t bindex, vfloat32mf2_t value,
+                           size_t vl) {
+  return vsoxei8_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i8.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m1(float *base, vuint8mf4_t bindex, vfloat32m1_t value,
+                          size_t vl) {
+  return vsoxei8_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i8.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m2(float *base, vuint8mf2_t bindex, vfloat32m2_t value,
+                          size_t vl) {
+  return vsoxei8_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i8.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value,
+                          size_t vl) {
+  return vsoxei8_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i8.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i8.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei8_v_f32m8(float *base, vuint8m2_t bindex, vfloat32m8_t value,
+                          size_t vl) {
+  return vsoxei8_v_f32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i16.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32mf2(float *base, vuint16mf4_t bindex,
+                            vfloat32mf2_t value, size_t vl) {
+  return vsoxei16_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i16.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32m1(float *base, vuint16mf2_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsoxei16_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i16.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsoxei16_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i16.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsoxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsoxei16_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i16.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
test_vsoxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t value, + size_t vl) { + return vsoxei16_v_f32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32mf2(float *base, vuint32mf2_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei32_v_f32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t value, + size_t vl) { + return vsoxei32_v_f32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t value, + size_t vl) { + return vsoxei32_v_f32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t value, + size_t vl) { + return vsoxei32_v_f32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: 
@test_vsoxei32_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv16f32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t value, + size_t vl) { + return vsoxei32_v_f32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32mf2(float *base, vuint64m1_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei64_v_f32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t value, + size_t vl) { + return vsoxei64_v_f32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t value, + size_t vl) { + return vsoxei64_v_f32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t value, + size_t vl) { + return vsoxei64_v_f32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vsoxei8_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m1(double *base, vuint8mf8_t bindex, vfloat64m1_t value, + size_t vl) { + return vsoxei8_v_f64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m2(double *base, vuint8mf4_t bindex, vfloat64m2_t value, + size_t vl) { + return vsoxei8_v_f64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m4(double *base, vuint8mf2_t bindex, vfloat64m4_t value, + size_t vl) { + return vsoxei8_v_f64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value, + size_t vl) { + return vsoxei8_v_f64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsoxei.nxv1f64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m1(double *base, vuint16mf4_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei16_v_f64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m2(double *base, vuint16mf2_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei16_v_f64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t value, + size_t vl) { + return vsoxei16_v_f64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value, + size_t vl) { + return vsoxei16_v_f64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m1(double *base, vuint32mf2_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei32_v_f64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsoxei.nxv2f64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t value, + size_t vl) { + return vsoxei32_v_f64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv4f64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t value, + size_t vl) { + return vsoxei32_v_f64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv8f64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t value, + size_t vl) { + return vsoxei32_v_f64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv1f64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t value, + size_t vl) { + return vsoxei64_v_f64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.nxv2f64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_vsoxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t value,
+                           size_t vl) {
+  return vsoxei64_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv4f64.nxv4i64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsoxei64_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.nxv8f64.nxv8i64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsoxei64_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint8mf8_t bindex,
+                            vint8mf8_t value, size_t vl) {
+  return vsoxei8_v_i8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint8mf4_t bindex,
+                            vint8mf4_t value, size_t vl) {
+  return vsoxei8_v_i8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i32(
[[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint8mf2_t bindex, + vint8mf2_t value, size_t vl) { + return vsoxei8_v_i8mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m1_m(vbool8_t mask, int8_t *base, vuint8m1_t bindex, + vint8m1_t value, size_t vl) { + return vsoxei8_v_i8m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m2_m(vbool4_t mask, int8_t *base, vuint8m2_t bindex, + vint8m2_t value, size_t vl) { + return vsoxei8_v_i8m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m4_m(vbool2_t mask, int8_t *base, vuint8m4_t bindex, + vint8m4_t value, size_t vl) { + return vsoxei8_v_i8m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i8m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i8m8_m(vbool1_t mask, int8_t *base, vuint8m8_t bindex, + vint8m8_t value, size_t vl) { + return vsoxei8_v_i8m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint16mf4_t bindex, + vint8mf8_t value, size_t vl) { + return vsoxei16_v_i8mf8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint16mf2_t bindex, + vint8mf4_t value, size_t vl) { + return vsoxei16_v_i8mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint16m1_t bindex, + vint8mf2_t value, size_t vl) { + return vsoxei16_v_i8mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m1_m(vbool8_t mask, int8_t *base, vuint16m2_t bindex, + vint8m1_t value, size_t 
vl) { + return vsoxei16_v_i8m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m2_m(vbool4_t mask, int8_t *base, vuint16m4_t bindex, + vint8m2_t value, size_t vl) { + return vsoxei16_v_i8m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i8m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i8m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i8m4_m(vbool2_t mask, int8_t *base, vuint16m8_t bindex, + vint8m4_t value, size_t vl) { + return vsoxei16_v_i8m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint32mf2_t bindex, + vint8mf8_t value, size_t vl) { + return vsoxei32_v_i8mf8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint32m1_t bindex, + vint8mf4_t value, size_t vl) { + return vsoxei32_v_i8mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint32m2_t bindex, + vint8mf2_t value, size_t vl) { + return vsoxei32_v_i8mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m1_m(vbool8_t mask, int8_t *base, vuint32m4_t bindex, + vint8m1_t value, size_t vl) { + return vsoxei32_v_i8m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i8m2_m(vbool4_t mask, int8_t *base, vuint32m8_t bindex, + vint8m2_t value, size_t vl) { + return vsoxei32_v_i8m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint64m1_t bindex, + vint8mf8_t value, size_t vl) { + return vsoxei64_v_i8mf8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint64m2_t bindex,
+                             vint8mf4_t value, size_t vl) {
+  return vsoxei64_v_i8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint64m4_t bindex,
+                             vint8mf2_t value, size_t vl) {
+  return vsoxei64_v_i8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i8m1_m(vbool8_t mask, int8_t *base, vuint64m8_t bindex,
+                            vint8m1_t value, size_t vl) {
+  return vsoxei64_v_i8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint8mf8_t bindex,
+                             vint16mf4_t value, size_t vl) {
+  return vsoxei8_v_i16mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i16mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i16mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void
test_vsoxei8_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint8mf4_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei8_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m1_m(vbool16_t mask, int16_t *base, vuint8mf2_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei8_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m2_m(vbool8_t mask, int16_t *base, vuint8m1_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei8_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m4_m(vbool4_t mask, int16_t *base, vuint8m2_t bindex, + vint16m4_t value, size_t vl) { + return vsoxei8_v_i16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i16m8_m(vbool2_t mask, int16_t *base, vuint8m4_t bindex, + vint16m8_t value, size_t vl) { + return vsoxei8_v_i16m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: 
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint16mf4_t bindex, vint16mf4_t value, + size_t vl) { + return vsoxei16_v_i16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16mf2_m(vbool32_t mask, int16_t *base, + vuint16mf2_t bindex, vint16mf2_t value, + size_t vl) { + return vsoxei16_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m1_m(vbool16_t mask, int16_t *base, vuint16m1_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei16_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m2_m(vbool8_t mask, int16_t *base, vuint16m2_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei16_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 
[[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m4_m(vbool4_t mask, int16_t *base, vuint16m4_t bindex, + vint16m4_t value, size_t vl) { + return vsoxei16_v_i16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i16m8_m(vbool2_t mask, int16_t *base, vuint16m8_t bindex, + vint16m8_t value, size_t vl) { + return vsoxei16_v_i16m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint32mf2_t bindex, vint16mf4_t value, + size_t vl) { + return vsoxei32_v_i16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint32m1_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei32_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: 
call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m1_m(vbool16_t mask, int16_t *base, vuint32m2_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei32_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m2_m(vbool8_t mask, int16_t *base, vuint32m4_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei32_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_i16m4_m(vbool4_t mask, int16_t *base, vuint32m8_t bindex, + vint16m4_t value, size_t vl) { + return vsoxei32_v_i16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint64m1_t bindex, + vint16mf4_t value, size_t vl) { + return vsoxei64_v_i16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16mf2_m(vbool32_t mask, int16_t 
*base, vuint64m2_t bindex, + vint16mf2_t value, size_t vl) { + return vsoxei64_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16m1_m(vbool16_t mask, int16_t *base, vuint64m4_t bindex, + vint16m1_t value, size_t vl) { + return vsoxei64_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_i16m2_m(vbool8_t mask, int16_t *base, vuint64m8_t bindex, + vint16m2_t value, size_t vl) { + return vsoxei64_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint8mf8_t bindex, + vint32mf2_t value, size_t vl) { + return vsoxei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m1_m(vbool32_t mask, int32_t *base, vuint8mf4_t bindex, + vint32m1_t value, size_t vl) { + return vsoxei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m2_m(vbool16_t mask, int32_t *base, vuint8mf2_t bindex, + vint32m2_t value, size_t vl) { + return vsoxei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m4_m(vbool8_t mask, int32_t *base, vuint8m1_t bindex, + vint32m4_t value, size_t vl) { + return vsoxei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_i32m8_m(vbool4_t mask, int32_t *base, vuint8m2_t bindex, + vint32m8_t value, size_t vl) { + return vsoxei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint16mf4_t bindex, vint32mf2_t value, + size_t vl) { + return vsoxei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: 
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i32m1_m(vbool32_t mask, int32_t *base, vuint16mf2_t bindex,
+                             vint32m1_t value, size_t vl) {
+  return vsoxei16_v_i32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i32m2_m(vbool16_t mask, int32_t *base, vuint16m1_t bindex,
+                             vint32m2_t value, size_t vl) {
+  return vsoxei16_v_i32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i32m4_m(vbool8_t mask, int32_t *base, vuint16m2_t bindex,
+                             vint32m4_t value, size_t vl) {
+  return vsoxei16_v_i32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i32m8_m(vbool4_t mask, int32_t *base, vuint16m4_t bindex,
+                             vint32m8_t value, size_t vl) {
+  return vsoxei16_v_i32m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i32mf2_m(vbool64_t mask, int32_t *base,
+                              vuint32mf2_t bindex, vint32mf2_t value,
+                              size_t vl) {
+  return vsoxei32_v_i32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i32m1_m(vbool32_t mask, int32_t *base, vuint32m1_t bindex,
+                             vint32m1_t value, size_t vl) {
+  return vsoxei32_v_i32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i32m2_m(vbool16_t mask, int32_t *base, vuint32m2_t bindex,
+                             vint32m2_t value, size_t vl) {
+  return vsoxei32_v_i32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i32m4_m(vbool8_t mask, int32_t *base, vuint32m4_t bindex,
+                             vint32m4_t value, size_t vl) {
+  return vsoxei32_v_i32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i32m8_m(vbool4_t mask, int32_t *base, vuint32m8_t bindex,
+                             vint32m8_t value, size_t vl) {
+  return vsoxei32_v_i32m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint64m1_t bindex,
+                              vint32mf2_t value, size_t vl) {
+  return vsoxei64_v_i32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i32m1_m(vbool32_t mask, int32_t *base, vuint64m2_t bindex,
+                             vint32m1_t value, size_t vl) {
+  return vsoxei64_v_i32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i32m2_m(vbool16_t mask, int32_t *base, vuint64m4_t bindex,
+                             vint32m2_t value, size_t vl) {
+  return vsoxei64_v_i32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i32m4_m(vbool8_t mask, int32_t *base, vuint64m8_t bindex,
+                             vint32m4_t value, size_t vl) {
+  return vsoxei64_v_i32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i64m1_m(vbool64_t mask, int64_t *base, vuint8mf8_t bindex,
+                            vint64m1_t value, size_t vl) {
+  return vsoxei8_v_i64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i64m2_m(vbool32_t mask, int64_t *base, vuint8mf4_t bindex,
+                            vint64m2_t value, size_t vl) {
+  return vsoxei8_v_i64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i64m4_m(vbool16_t mask, int64_t *base, vuint8mf2_t bindex,
+                            vint64m4_t value, size_t vl) {
+  return vsoxei8_v_i64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_i64m8_m(vbool8_t mask, int64_t *base, vuint8m1_t bindex,
+                            vint64m8_t value, size_t vl) {
+  return vsoxei8_v_i64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i64m1_m(vbool64_t mask, int64_t *base, vuint16mf4_t bindex,
+                             vint64m1_t value, size_t vl) {
+  return vsoxei16_v_i64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i64m2_m(vbool32_t mask, int64_t *base, vuint16mf2_t bindex,
+                             vint64m2_t value, size_t vl) {
+  return vsoxei16_v_i64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i64m4_m(vbool16_t mask, int64_t *base, vuint16m1_t bindex,
+                             vint64m4_t value, size_t vl) {
+  return vsoxei16_v_i64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_i64m8_m(vbool8_t mask, int64_t *base, vuint16m2_t bindex,
+                             vint64m8_t value, size_t vl) {
+  return vsoxei16_v_i64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i64m1_m(vbool64_t mask, int64_t *base, vuint32mf2_t bindex,
+                             vint64m1_t value, size_t vl) {
+  return vsoxei32_v_i64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i64m2_m(vbool32_t mask, int64_t *base, vuint32m1_t bindex,
+                             vint64m2_t value, size_t vl) {
+  return vsoxei32_v_i64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i64m4_m(vbool16_t mask, int64_t *base, vuint32m2_t bindex,
+                             vint64m4_t value, size_t vl) {
+  return vsoxei32_v_i64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_i64m8_m(vbool8_t mask, int64_t *base, vuint32m4_t bindex,
+                             vint64m8_t value, size_t vl) {
+  return vsoxei32_v_i64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i64m1_m(vbool64_t mask, int64_t *base, vuint64m1_t bindex,
+                             vint64m1_t value, size_t vl) {
+  return vsoxei64_v_i64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i64m2_m(vbool32_t mask, int64_t *base, vuint64m2_t bindex,
+                             vint64m2_t value, size_t vl) {
+  return vsoxei64_v_i64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i64m4_m(vbool16_t mask, int64_t *base, vuint64m4_t bindex,
+                             vint64m4_t value, size_t vl) {
+  return vsoxei64_v_i64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_i64m8_m(vbool8_t mask, int64_t *base, vuint64m8_t bindex,
+                             vint64m8_t value, size_t vl) {
+  return vsoxei64_v_i64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint8mf8_t bindex,
+                            vuint8mf8_t value, size_t vl) {
+  return vsoxei8_v_u8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint8mf4_t bindex,
+                            vuint8mf4_t value, size_t vl) {
+  return vsoxei8_v_u8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint8mf2_t bindex,
+                            vuint8mf2_t value, size_t vl) {
+  return vsoxei8_v_u8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint8m1_t bindex,
+                           vuint8m1_t value, size_t vl) {
+  return vsoxei8_v_u8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint8m2_t bindex,
+                           vuint8m2_t value, size_t vl) {
+  return vsoxei8_v_u8m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint8m4_t bindex,
+                           vuint8m4_t value, size_t vl) {
+  return vsoxei8_v_u8m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u8m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u8m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u8m8_m(vbool1_t mask, uint8_t *base, vuint8m8_t bindex,
+                           vuint8m8_t value, size_t vl) {
+  return vsoxei8_v_u8m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint16mf4_t bindex,
+                             vuint8mf8_t value, size_t vl) {
+  return vsoxei16_v_u8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint16mf2_t bindex,
+                             vuint8mf4_t value, size_t vl) {
+  return vsoxei16_v_u8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint16m1_t bindex,
+                             vuint8mf2_t value, size_t vl) {
+  return vsoxei16_v_u8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint16m2_t bindex,
+                            vuint8m1_t value, size_t vl) {
+  return vsoxei16_v_u8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint16m4_t bindex,
+                            vuint8m2_t value, size_t vl) {
+  return vsoxei16_v_u8m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u8m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u8m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint16m8_t bindex,
+                            vuint8m4_t value, size_t vl) {
+  return vsoxei16_v_u8m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint32mf2_t bindex,
+                             vuint8mf8_t value, size_t vl) {
+  return vsoxei32_v_u8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint32m1_t bindex,
+                             vuint8mf4_t value, size_t vl) {
+  return vsoxei32_v_u8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint32m2_t bindex,
+                             vuint8mf2_t value, size_t vl) {
+  return vsoxei32_v_u8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint32m4_t bindex,
+                            vuint8m1_t value, size_t vl) {
+  return vsoxei32_v_u8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint32m8_t bindex,
+                            vuint8m2_t value, size_t vl) {
+  return vsoxei32_v_u8m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint64m1_t bindex,
+                             vuint8mf8_t value, size_t vl) {
+  return vsoxei64_v_u8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint64m2_t bindex,
+                             vuint8mf4_t value, size_t vl) {
+  return vsoxei64_v_u8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint64m4_t bindex,
+                             vuint8mf2_t value, size_t vl) {
+  return vsoxei64_v_u8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint64m8_t bindex,
+                            vuint8m1_t value, size_t vl) {
+  return vsoxei64_v_u8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u16mf4_m(vbool64_t mask, uint16_t *base, vuint8mf8_t bindex,
+                             vuint16mf4_t value, size_t vl) {
+  return vsoxei8_v_u16mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u16mf2_m(vbool32_t mask, uint16_t *base, vuint8mf4_t bindex,
+                             vuint16mf2_t value, size_t vl) {
+  return vsoxei8_v_u16mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint8mf2_t bindex,
+                            vuint16m1_t value, size_t vl) {
+  return vsoxei8_v_u16m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint8m1_t bindex,
+                            vuint16m2_t value, size_t vl) {
+  return vsoxei8_v_u16m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint8m2_t bindex,
+                            vuint16m4_t value, size_t vl) {
+  return vsoxei8_v_u16m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u16m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u16m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint8m4_t bindex,
+                            vuint16m8_t value, size_t vl) {
+  return vsoxei8_v_u16m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u16mf4_m(vbool64_t mask, uint16_t *base,
+                              vuint16mf4_t bindex, vuint16mf4_t value,
+                              size_t vl) {
+  return vsoxei16_v_u16mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u16mf2_m(vbool32_t mask, uint16_t *base,
+                              vuint16mf2_t bindex, vuint16mf2_t value,
+                              size_t vl) {
+  return vsoxei16_v_u16mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint16m1_t bindex,
+                             vuint16m1_t value, size_t vl) {
+  return vsoxei16_v_u16m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint16m2_t bindex,
+                             vuint16m2_t value, size_t vl) {
+  return vsoxei16_v_u16m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint16m4_t bindex,
+                             vuint16m4_t value, size_t vl) {
+  return vsoxei16_v_u16m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei16_v_u16m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei16_v_u16m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei16_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint16m8_t bindex,
+                             vuint16m8_t value, size_t vl) {
+  return vsoxei16_v_u16m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u16mf4_m(vbool64_t mask, uint16_t *base,
+                              vuint32mf2_t bindex, vuint16mf4_t value,
+                              size_t vl) {
+  return vsoxei32_v_u16mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u16mf2_m(vbool32_t mask, uint16_t *base,
+                              vuint32m1_t bindex, vuint16mf2_t value,
+                              size_t vl) {
+  return vsoxei32_v_u16mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint32m2_t bindex,
+                             vuint16m1_t value, size_t vl) {
+  return vsoxei32_v_u16m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint32m4_t bindex,
+                             vuint16m2_t value, size_t vl) {
+  return vsoxei32_v_u16m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei32_v_u16m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_u16m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint32m8_t bindex,
+                             vuint16m4_t value, size_t vl) {
+  return vsoxei32_v_u16m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u16mf4_m(vbool64_t mask, uint16_t *base,
+                              vuint64m1_t bindex, vuint16mf4_t value,
+                              size_t vl) {
+  return vsoxei64_v_u16mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u16mf2_m(vbool32_t mask, uint16_t *base,
+                              vuint64m2_t bindex, vuint16mf2_t value,
+                              size_t vl) {
+  return vsoxei64_v_u16mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint64m4_t bindex,
+                             vuint16m1_t value, size_t vl) {
+  return vsoxei64_v_u16m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_u16m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_u16m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint64m8_t bindex,
+                             vuint16m2_t value, size_t vl) {
+  return vsoxei64_v_u16m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei8_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei8_v_u32mf2_m(vbool64_t mask, uint32_t *base, vuint8mf8_t bindex,
+                             vuint32mf2_t value, size_t vl) {
+  return vsoxei8_v_u32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, + vuint32m1_t value, size_t vl) { + return vsoxei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, + vuint32m2_t value, size_t vl) { + return vsoxei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint8m1_t bindex, + vuint32m4_t value, size_t vl) { + return vsoxei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint8m2_t bindex, + vuint32m8_t value, size_t vl) { + return vsoxei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u32mf2_m( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint16mf4_t bindex, vuint32mf2_t value, + size_t vl) { + return vsoxei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u32m1_m(vbool32_t mask, uint32_t *base, + vuint16mf2_t bindex, vuint32m1_t value, + size_t vl) { + return vsoxei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint16m1_t bindex, + vuint32m2_t value, size_t vl) { + return vsoxei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint16m2_t bindex, + vuint32m4_t value, size_t vl) { + return vsoxei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], 
[[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint16m4_t bindex, + vuint32m8_t value, size_t vl) { + return vsoxei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint32mf2_t bindex, vuint32mf2_t value, + size_t vl) { + return vsoxei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint32m1_t bindex, + vuint32m1_t value, size_t vl) { + return vsoxei32_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint32m2_t bindex, + vuint32m2_t value, size_t vl) { + return vsoxei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint32m4_t bindex, + vuint32m4_t value, size_t vl) { + return vsoxei32_v_u32m4_m(mask, 
base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16i32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint32m8_t bindex, + vuint32m8_t value, size_t vl) { + return vsoxei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint64m1_t bindex, vuint32mf2_t value, + size_t vl) { + return vsoxei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint64m2_t bindex, + vuint32m1_t value, size_t vl) { + return vsoxei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint64m4_t bindex, + vuint32m2_t value, size_t vl) { + return vsoxei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint64m8_t bindex, + vuint32m4_t value, size_t vl) { + return vsoxei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, + vuint64m1_t value, size_t vl) { + return vsoxei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, + vuint64m2_t value, size_t vl) { + return vsoxei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, + vuint64m4_t value, size_t vl) { + return vsoxei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint8m1_t bindex, + vuint64m8_t value, size_t vl) { + return vsoxei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint16mf4_t bindex, vuint64m1_t value, + size_t vl) { + return vsoxei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m2_m(vbool32_t mask, uint64_t *base, + vuint16mf2_t bindex, vuint64m2_t value, + size_t vl) { + return vsoxei16_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint16m1_t bindex, + vuint64m4_t value, size_t vl) { + return vsoxei16_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint16m2_t bindex, + vuint64m8_t value, size_t vl) { + return vsoxei16_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint32mf2_t bindex, vuint64m1_t value, + size_t vl) { + return vsoxei32_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint32m1_t bindex, + vuint64m2_t value, size_t vl) { + return vsoxei32_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint32m2_t bindex, + vuint64m4_t value, size_t vl) { + return vsoxei32_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint32m4_t bindex, + vuint64m8_t value, size_t vl) { + return vsoxei32_v_u64m8_m(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsoxei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint64m1_t bindex, + vuint64m1_t value, size_t vl) { + return vsoxei64_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint64m2_t bindex, + vuint64m2_t value, size_t vl) { + return vsoxei64_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint64m4_t bindex, + vuint64m4_t value, size_t vl) { + return vsoxei64_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8i64.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint64m8_t bindex, + vuint64m8_t value, size_t vl) { + return vsoxei64_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i8.i32( 
[[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32mf2_m(vbool64_t mask, float *base, vuint8mf8_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei8_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m1_m(vbool32_t mask, float *base, vuint8mf4_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei8_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m2_m(vbool16_t mask, float *base, vuint8mf2_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei8_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m4_m(vbool8_t mask, float *base, vuint8m1_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei8_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f32m8_m(vbool4_t mask, float *base, vuint8m2_t bindex, + vfloat32m8_t value, size_t vl) { + return vsoxei8_v_f32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32mf2_m(vbool64_t mask, float *base, vuint16mf4_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei16_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32m1_m(vbool32_t mask, float *base, vuint16mf2_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei16_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32m2_m(vbool16_t mask, float *base, vuint16m1_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei16_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_vsoxei16_v_f32m4_m(vbool8_t mask, float *base, vuint16m2_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei16_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f32m8_m(vbool4_t mask, float *base, vuint16m4_t bindex, + vfloat32m8_t value, size_t vl) { + return vsoxei16_v_f32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32mf2_m(vbool64_t mask, float *base, vuint32mf2_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei32_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m1_m(vbool32_t mask, float *base, vuint32m1_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei32_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m2_m(vbool16_t mask, float *base, vuint32m2_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei32_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vsoxei32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m4_m(vbool8_t mask, float *base, vuint32m4_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei32_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv16f32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f32m8_m(vbool4_t mask, float *base, vuint32m8_t bindex, + vfloat32m8_t value, size_t vl) { + return vsoxei32_v_f32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32mf2_m(vbool64_t mask, float *base, vuint64m1_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsoxei64_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m1_m(vbool32_t mask, float *base, vuint64m2_t bindex, + vfloat32m1_t value, size_t vl) { + return vsoxei64_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i64.i32( 
[[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m2_m(vbool16_t mask, float *base, vuint64m4_t bindex, + vfloat32m2_t value, size_t vl) { + return vsoxei64_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei64_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei64_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei64_v_f32m4_m(vbool8_t mask, float *base, vuint64m8_t bindex, + vfloat32m4_t value, size_t vl) { + return vsoxei64_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m1_m(vbool64_t mask, double *base, vuint8mf8_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei8_v_f64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m2_m(vbool32_t mask, double *base, vuint8mf4_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei8_v_f64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m4_m(vbool16_t mask, double *base, vuint8mf2_t bindex, + vfloat64m4_t value, size_t vl) { + return vsoxei8_v_f64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei8_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei8_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei8_v_f64m8_m(vbool8_t mask, double *base, vuint8m1_t bindex, + vfloat64m8_t value, size_t vl) { + return vsoxei8_v_f64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m1_m(vbool64_t mask, double *base, vuint16mf4_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei16_v_f64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m2_m(vbool32_t mask, double *base, vuint16mf2_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei16_v_f64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret 
void +// +void test_vsoxei16_v_f64m4_m(vbool16_t mask, double *base, vuint16m1_t bindex, + vfloat64m4_t value, size_t vl) { + return vsoxei16_v_f64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei16_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei16_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei16_v_f64m8_m(vbool8_t mask, double *base, vuint16m2_t bindex, + vfloat64m8_t value, size_t vl) { + return vsoxei16_v_f64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m1_m(vbool64_t mask, double *base, vuint32mf2_t bindex, + vfloat64m1_t value, size_t vl) { + return vsoxei32_v_f64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m2_m(vbool32_t mask, double *base, vuint32m1_t bindex, + vfloat64m2_t value, size_t vl) { + return vsoxei32_v_f64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsoxei32_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsoxei32_v_f64m4_m(vbool16_t mask, double *base, vuint32m2_t bindex, + vfloat64m4_t value, size_t vl) { + return vsoxei32_v_f64m4_m(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsoxei32_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i32.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei32_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei32_v_f64m8_m(vbool8_t mask, double *base, vuint32m4_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsoxei32_v_f64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m1_m(vbool64_t mask, double *base, vuint64m1_t bindex,
+                             vfloat64m1_t value, size_t vl) {
+  return vsoxei64_v_f64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m2_m(vbool32_t mask, double *base, vuint64m2_t bindex,
+                             vfloat64m2_t value, size_t vl) {
+  return vsoxei64_v_f64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m4_m(vbool16_t mask, double *base, vuint64m4_t bindex,
+                             vfloat64m4_t value, size_t vl) {
+  return vsoxei64_v_f64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsoxei64_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsoxei64_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsoxei.mask.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsoxei64_v_f64m8_m(vbool8_t mask, double *base, vuint64m8_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsoxei64_v_f64m8_m(mask, base, bindex, value, vl);
+}
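+
+// Illustrative usage (added for review context, not autogenerated): every
+// masked indexed store checked above has the shape (mask, base pointer,
+// index vector of byte offsets, value vector, vl). The helper below is a
+// hypothetical example that scatters one register group of doubles through
+// 32-bit byte offsets, mirroring the test_vsoxei32_v_f64m1_m signature.
+static inline void example_scatter_f64m1(vbool64_t mask, double *base,
+                                         vuint32mf2_t byte_offsets,
+                                         vfloat64m1_t value, size_t vl) {
+  vsoxei32_v_f64m1_m(mask, base, byte_offsets, value, vl);
+}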
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vsse.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vsse.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vsse.c
@@ -0,0 +1,1813 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
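+
+// Illustrative usage (added for review context, not autogenerated): the
+// strided stores under test take the base pointer, a byte stride, the value
+// vector, and vl, in that order. This helper is hypothetical, and the
+// every-other-element stride is only an example.
+static inline void example_store_every_other_i8(int8_t *base, vint8m1_t value,
+                                                size_t vl) {
+  vsse8_v_i8m1(base, 2 * sizeof(int8_t), value, vl); // stride in bytes
+}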
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8mf2(int8_t *base, ptrdiff_t bstride, vint8mf2_t value,
+                        size_t vl) {
+  return vsse8_v_i8mf2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m1(int8_t *base, ptrdiff_t bstride, vint8m1_t value,
+                       size_t vl) {
+  return vsse8_v_i8m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m2(int8_t *base, ptrdiff_t bstride, vint8m2_t value,
+                       size_t vl) {
+  return vsse8_v_i8m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m4(int8_t *base, ptrdiff_t bstride, vint8m4_t value,
+                       size_t vl) {
+  return vsse8_v_i8m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m8(int8_t *base, ptrdiff_t bstride, vint8m8_t value,
+                       size_t vl) {
+  return vsse8_v_i8m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16mf4(int16_t *base, ptrdiff_t bstride, vint16mf4_t value,
+                          size_t vl) {
+  return vsse16_v_i16mf4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16mf2(int16_t *base, ptrdiff_t bstride, vint16mf2_t value,
+                          size_t vl) {
+  return vsse16_v_i16mf2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m1(int16_t *base, ptrdiff_t bstride, vint16m1_t value,
+                         size_t vl) {
+  return vsse16_v_i16m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m2(int16_t *base, ptrdiff_t bstride, vint16m2_t value,
+                         size_t vl) {
+  return vsse16_v_i16m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m4(int16_t *base, ptrdiff_t bstride, vint16m4_t value,
+                         size_t vl) {
+  return vsse16_v_i16m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m8(int16_t *base, ptrdiff_t bstride, vint16m8_t value,
+                         size_t vl) {
+  return vsse16_v_i16m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32mf2(int32_t *base, ptrdiff_t bstride, vint32mf2_t value,
+                          size_t vl) {
+  return vsse32_v_i32mf2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m1(int32_t *base, ptrdiff_t bstride, vint32m1_t value,
+                         size_t vl) {
+  return vsse32_v_i32m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m2(int32_t *base, ptrdiff_t bstride, vint32m2_t value,
+                         size_t vl) {
+  return vsse32_v_i32m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m4(int32_t *base, ptrdiff_t bstride, vint32m4_t value,
+                         size_t vl) {
+  return vsse32_v_i32m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m8(int32_t *base, ptrdiff_t bstride, vint32m8_t value,
+                         size_t vl) {
+  return vsse32_v_i32m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m1(int64_t *base, ptrdiff_t bstride, vint64m1_t value,
+                         size_t vl) {
+  return vsse64_v_i64m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m2(int64_t *base, ptrdiff_t bstride, vint64m2_t value,
+                         size_t vl) {
+  return vsse64_v_i64m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m4(int64_t *base, ptrdiff_t bstride, vint64m4_t value,
+                         size_t vl) {
+  return vsse64_v_i64m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m8(int64_t *base, ptrdiff_t bstride, vint64m8_t value,
+                         size_t vl) {
+  return vsse64_v_i64m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf8(uint8_t *base, ptrdiff_t bstride, vuint8mf8_t value,
+                        size_t vl) {
+  return vsse8_v_u8mf8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf4(uint8_t *base, ptrdiff_t bstride, vuint8mf4_t value,
+                        size_t vl) {
+  return vsse8_v_u8mf4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf2(uint8_t *base, ptrdiff_t bstride, vuint8mf2_t value,
+                        size_t vl) {
+  return vsse8_v_u8mf2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m1(uint8_t *base, ptrdiff_t bstride, vuint8m1_t value,
+                       size_t vl) {
+  return vsse8_v_u8m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m2(uint8_t *base, ptrdiff_t bstride, vuint8m2_t value,
+                       size_t vl) {
+  return vsse8_v_u8m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m4(uint8_t *base, ptrdiff_t bstride, vuint8m4_t value,
+                       size_t vl) {
+  return vsse8_v_u8m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m8(uint8_t *base, ptrdiff_t bstride, vuint8m8_t value,
+                       size_t vl) {
+  return vsse8_v_u8m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16mf4(uint16_t *base, ptrdiff_t bstride, vuint16mf4_t value,
+                          size_t vl) {
+  return vsse16_v_u16mf4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16mf2(uint16_t *base, ptrdiff_t bstride, vuint16mf2_t value,
+                          size_t vl) {
+  return vsse16_v_u16mf2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m1(uint16_t *base, ptrdiff_t bstride, vuint16m1_t value,
+                         size_t vl) {
+  return vsse16_v_u16m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m2(uint16_t *base, ptrdiff_t bstride, vuint16m2_t value,
+                         size_t vl) {
+  return vsse16_v_u16m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m4(uint16_t *base, ptrdiff_t bstride, vuint16m4_t value,
+                         size_t vl) {
+  return vsse16_v_u16m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_u16m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_u16m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_u16m8(uint16_t *base, ptrdiff_t bstride, vuint16m8_t value,
+                         size_t vl) {
+  return vsse16_v_u16m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32mf2(uint32_t *base, ptrdiff_t bstride, vuint32mf2_t value,
+                          size_t vl) {
+  return vsse32_v_u32mf2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m1(uint32_t *base, ptrdiff_t bstride, vuint32m1_t value,
+                         size_t vl) {
+  return vsse32_v_u32m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m2(uint32_t *base, ptrdiff_t bstride, vuint32m2_t value,
+                         size_t vl) {
+  return vsse32_v_u32m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m4(uint32_t *base, ptrdiff_t bstride, vuint32m4_t value,
+                         size_t vl) {
+  return vsse32_v_u32m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_u32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_u32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_u32m8(uint32_t *base, ptrdiff_t bstride, vuint32m8_t value,
+                         size_t vl) {
+  return vsse32_v_u32m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m1(uint64_t *base, ptrdiff_t bstride, vuint64m1_t value,
+                         size_t vl) {
+  return vsse64_v_u64m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m2(uint64_t *base, ptrdiff_t bstride, vuint64m2_t value,
+                         size_t vl) {
+  return vsse64_v_u64m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m4(uint64_t *base, ptrdiff_t bstride, vuint64m4_t value,
+                         size_t vl) {
+  return vsse64_v_u64m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_u64m8(uint64_t *base, ptrdiff_t bstride, vuint64m8_t value,
+                         size_t vl) {
+  return vsse64_v_u64m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1f32.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1f32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32mf2(float *base, ptrdiff_t bstride, vfloat32mf2_t value,
+                          size_t vl) {
+  return vsse32_v_f32mf2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2f32.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2f32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m1(float *base, ptrdiff_t bstride, vfloat32m1_t value,
+                         size_t vl) {
+  return vsse32_v_f32m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4f32.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4f32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m2(float *base, ptrdiff_t bstride, vfloat32m2_t value,
+                         size_t vl) {
+  return vsse32_v_f32m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8f32.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8f32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m4(float *base, ptrdiff_t bstride, vfloat32m4_t value,
+                         size_t vl) {
+  return vsse32_v_f32m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_f32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv16f32.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_f32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv16f32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_f32m8(float *base, ptrdiff_t bstride, vfloat32m8_t value,
+                         size_t vl) {
+  return vsse32_v_f32m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv1f64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv1f64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m1(double *base, ptrdiff_t bstride, vfloat64m1_t value,
+                         size_t vl) {
+  return vsse64_v_f64m1(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv2f64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv2f64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m2(double *base, ptrdiff_t bstride, vfloat64m2_t value,
+                         size_t vl) {
+  return vsse64_v_f64m2(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv4f64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv4f64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m4(double *base, ptrdiff_t bstride, vfloat64m4_t value,
+                         size_t vl) {
+  return vsse64_v_f64m4(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.nxv8f64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.nxv8f64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m8(double *base, ptrdiff_t bstride, vfloat64m8_t value,
+                         size_t vl) {
+  return vsse64_v_f64m8(base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8mf8_m(vbool64_t mask, int8_t *base, ptrdiff_t bstride,
+                          vint8mf8_t value, size_t vl) {
+  return vsse8_v_i8mf8_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8mf4_m(vbool32_t mask, int8_t *base, ptrdiff_t bstride,
+                          vint8mf4_t value, size_t vl) {
+  return vsse8_v_i8mf4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8mf2_m(vbool16_t mask, int8_t *base, ptrdiff_t bstride,
+                          vint8mf2_t value, size_t vl) {
+  return vsse8_v_i8mf2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m1_m(vbool8_t mask, int8_t *base, ptrdiff_t bstride,
+                         vint8m1_t value, size_t vl) {
+  return vsse8_v_i8m1_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m2_m(vbool4_t mask, int8_t *base, ptrdiff_t bstride,
+                         vint8m2_t value, size_t vl) {
+  return vsse8_v_i8m2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m4_m(vbool2_t mask, int8_t *base, ptrdiff_t bstride,
+                         vint8m4_t value, size_t vl) {
+  return vsse8_v_i8m4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_i8m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_i8m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_i8m8_m(vbool1_t mask, int8_t *base, ptrdiff_t bstride,
+                         vint8m8_t value, size_t vl) {
+  return vsse8_v_i8m8_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16mf4_m(vbool64_t mask, int16_t *base, ptrdiff_t bstride,
+                            vint16mf4_t value, size_t vl) {
+  return vsse16_v_i16mf4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16mf2_m(vbool32_t mask, int16_t *base, ptrdiff_t bstride,
+                            vint16mf2_t value, size_t vl) {
+  return vsse16_v_i16mf2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m1_m(vbool16_t mask, int16_t *base, ptrdiff_t bstride,
+                           vint16m1_t value, size_t vl) {
+  return vsse16_v_i16m1_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m2_m(vbool8_t mask, int16_t *base, ptrdiff_t bstride,
+                           vint16m2_t value, size_t vl) {
+  return vsse16_v_i16m2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m4_m(vbool4_t mask, int16_t *base, ptrdiff_t bstride,
+                           vint16m4_t value, size_t vl) {
+  return vsse16_v_i16m4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse16_v_i16m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse16_v_i16m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse16_v_i16m8_m(vbool2_t mask, int16_t *base, ptrdiff_t bstride,
+                           vint16m8_t value, size_t vl) {
+  return vsse16_v_i16m8_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32mf2_m(vbool64_t mask, int32_t *base, ptrdiff_t bstride,
+                            vint32mf2_t value, size_t vl) {
+  return vsse32_v_i32mf2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m1_m(vbool32_t mask, int32_t *base, ptrdiff_t bstride,
+                           vint32m1_t value, size_t vl) {
+  return vsse32_v_i32m1_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m2_m(vbool16_t mask, int32_t *base, ptrdiff_t bstride,
+                           vint32m2_t value, size_t vl) {
+  return vsse32_v_i32m2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m4_m(vbool8_t mask, int32_t *base, ptrdiff_t bstride,
+                           vint32m4_t value, size_t vl) {
+  return vsse32_v_i32m4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse32_v_i32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse32_v_i32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse32_v_i32m8_m(vbool4_t mask, int32_t *base, ptrdiff_t bstride,
+                           vint32m8_t value, size_t vl) {
+  return vsse32_v_i32m8_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m1_m(vbool64_t mask, int64_t *base, ptrdiff_t bstride,
+                           vint64m1_t value, size_t vl) {
+  return vsse64_v_i64m1_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m2_m(vbool32_t mask, int64_t *base, ptrdiff_t bstride,
+                           vint64m2_t value, size_t vl) {
+  return vsse64_v_i64m2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m4_m(vbool16_t mask, int64_t *base, ptrdiff_t bstride,
+                           vint64m4_t value, size_t vl) {
+  return vsse64_v_i64m4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_i64m8_m(vbool8_t mask, int64_t *base, ptrdiff_t bstride,
+                           vint64m8_t value, size_t vl) {
+  return vsse64_v_i64m8_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf8_m(vbool64_t mask, uint8_t *base, ptrdiff_t bstride,
+                          vuint8mf8_t value, size_t vl) {
+  return vsse8_v_u8mf8_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf4_m(vbool32_t mask, uint8_t *base, ptrdiff_t bstride,
+                          vuint8mf4_t value, size_t vl) {
+  return vsse8_v_u8mf4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8mf2_m(vbool16_t mask, uint8_t *base, ptrdiff_t bstride,
+                          vuint8mf2_t value, size_t vl) {
+  return vsse8_v_u8mf2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m1_m(vbool8_t mask, uint8_t *base, ptrdiff_t bstride,
+                         vuint8m1_t value, size_t vl) {
+  return vsse8_v_u8m1_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m2_m(vbool4_t mask, uint8_t *base, ptrdiff_t bstride,
+                         vuint8m2_t value, size_t vl) {
+  return vsse8_v_u8m2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse8_v_u8m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse8_v_u8m4_m(vbool2_t mask, uint8_t *base, ptrdiff_t bstride,
+                         vuint8m4_t value, size_t vl) {
+  return vsse8_v_u8m4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse8_v_u8m8_m(
+// CHECK-RV32-NEXT:  entry:
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv64i8.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse8_v_u8m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv64i8.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse8_v_u8m8_m(vbool1_t mask, uint8_t *base, ptrdiff_t bstride, + vuint8m8_t value, size_t vl) { + return vsse8_v_u8m8_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16mf4_m(vbool64_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16mf4_t value, size_t vl) { + return vsse16_v_u16mf4_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16mf2_m(vbool32_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16mf2_t value, size_t vl) { + return vsse16_v_u16mf2_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16m1_m(vbool16_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16m1_t value, size_t vl) { + return vsse16_v_u16m1_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16m2_m( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16m2_m(vbool8_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16m2_t value, size_t vl) { + return vsse16_v_u16m2_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16m4_m(vbool4_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16m4_t value, size_t vl) { + return vsse16_v_u16m4_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse16_v_u16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse16_v_u16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse16_v_u16m8_m(vbool2_t mask, uint16_t *base, ptrdiff_t bstride, + vuint16m8_t value, size_t vl) { + return vsse16_v_u16m8_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_u32mf2_m(vbool64_t mask, uint32_t *base, ptrdiff_t bstride, + vuint32mf2_t value, size_t vl) { + return vsse32_v_u32mf2_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_vsse32_v_u32m1_m(vbool32_t mask, uint32_t *base, ptrdiff_t bstride, + vuint32m1_t value, size_t vl) { + return vsse32_v_u32m1_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_u32m2_m(vbool16_t mask, uint32_t *base, ptrdiff_t bstride, + vuint32m2_t value, size_t vl) { + return vsse32_v_u32m2_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_u32m4_m(vbool8_t mask, uint32_t *base, ptrdiff_t bstride, + vuint32m4_t value, size_t vl) { + return vsse32_v_u32m4_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_u32m8_m(vbool4_t mask, uint32_t *base, ptrdiff_t bstride, + vuint32m8_t value, size_t vl) { + return vsse32_v_u32m8_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_u64m1_m(vbool64_t mask, uint64_t *base, ptrdiff_t bstride, + vuint64m1_t value, size_t vl) { + return vsse64_v_u64m1_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_u64m2_m(vbool32_t mask, uint64_t *base, ptrdiff_t bstride, + vuint64m2_t value, size_t vl) { + return vsse64_v_u64m2_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_u64m4_m(vbool16_t mask, uint64_t *base, ptrdiff_t bstride, + vuint64m4_t value, size_t vl) { + return vsse64_v_u64m4_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_u64m8_m(vbool8_t mask, uint64_t *base, ptrdiff_t bstride, + vuint64m8_t value, size_t vl) { + return vsse64_v_u64m8_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_f32mf2_m(vbool64_t mask, float *base, ptrdiff_t bstride, + vfloat32mf2_t value, size_t vl) { + return vsse32_v_f32mf2_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv2f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv2f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_f32m1_m(vbool32_t mask, float *base, ptrdiff_t bstride, + vfloat32m1_t value, size_t vl) { + return vsse32_v_f32m1_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv4f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv4f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_f32m2_m(vbool16_t mask, float *base, ptrdiff_t bstride, + vfloat32m2_t value, size_t vl) { + return vsse32_v_f32m2_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv8f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv8f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_f32m4_m(vbool8_t mask, float *base, ptrdiff_t bstride, + vfloat32m4_t value, size_t vl) { + return vsse32_v_f32m4_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv16f32.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv16f32.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse32_v_f32m8_m(vbool4_t mask, float *base, ptrdiff_t bstride, + vfloat32m8_t value, size_t vl) { + return vsse32_v_f32m8_m(mask, base, bstride, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsse64_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsse.mask.nxv1f64.i32( [[VALUE:%.*]], * [[TMP0]], i32 [[BSTRIDE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsse64_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsse.mask.nxv1f64.i64( [[VALUE:%.*]], * [[TMP0]], i64 [[BSTRIDE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsse64_v_f64m1_m(vbool64_t 
mask, double *base, ptrdiff_t bstride,
+                           vfloat64m1_t value, size_t vl) {
+  return vsse64_v_f64m1_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv2f64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv2f64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m2_m(vbool32_t mask, double *base, ptrdiff_t bstride,
+                           vfloat64m2_t value, size_t vl) {
+  return vsse64_v_f64m2_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv4f64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv4f64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m4_m(vbool16_t mask, double *base, ptrdiff_t bstride,
+                           vfloat64m4_t value, size_t vl) {
+  return vsse64_v_f64m4_m(mask, base, bstride, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsse64_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsse.mask.nxv8f64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], i32 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsse64_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsse.mask.nxv8f64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], i64 [[BSTRIDE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsse64_v_f64m8_m(vbool8_t mask, double *base, ptrdiff_t bstride,
+                           vfloat64m8_t value, size_t vl) {
+  return vsse64_v_f64m8_m(mask, base, bstride, value, vl);
+}
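For reference, here is a minimal usage sketch of the strided intrinsics exercised by the tests above. It is illustrative only, not part of the patch: it uses the unmasked vlse64_v_f64m1/vse64_v_f64m1 forms generated by the TableGen changes, and it assumes the standard vsetvl_e64m1 helper, which is outside this diff.

#include <stddef.h>
#include <riscv_vector.h>

/* Copy column `col` of a row-major rows x cols matrix of doubles into a
   packed buffer: vlse64 reads one element per row (byte stride = row size),
   vse64 writes them contiguously. Strip-mined with vsetvl (assumed). */
void copy_column(const double *mat, double *out, size_t rows, size_t cols,
                 size_t col) {
  const double *src = mat + col;
  ptrdiff_t bstride = (ptrdiff_t)(cols * sizeof(double)); /* stride in bytes */
  for (size_t i = 0; i < rows;) {
    size_t vl = vsetvl_e64m1(rows - i);     /* elements handled this pass */
    vfloat64m1_t v = vlse64_v_f64m1(src + i * cols, bstride, vl);
    vse64_v_f64m1(out + i, v, vl);
    i += vl;
  }
}

The masked _m variants checked above take the same trailing (base, bstride, value, vl) operands with a leading vboolN_t mask, so inactive elements leave memory untouched.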
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vsuxei.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vsuxei.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vsuxei.c
@@ -0,0 +1,6523 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
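The new file below covers the unordered indexed stores. As a quick orientation, and again only as an illustrative sketch rather than patch content: vsuxei{8,16,32,64} store each element of value to base plus the matching element of bindex, taken as an unsigned byte offset, with no ordering guarantee between elements; vsoxei is the ordered counterpart for offsets that may collide. vsetvl_e32m1 and the vle32 loads used here are assumptions from the same intrinsic family, not exercised by this file.

#include <stddef.h>
#include <stdint.h>
#include <riscv_vector.h>

/* Scatter n packed 32-bit values to base + offs[i], where offs holds byte
   offsets. vsuxei32 gives no ordering between the element stores; switch to
   vsoxei32_v_i32m1 if overlapping offsets must retire in element order. */
void scatter_i32(int32_t *base, const uint32_t *offs, const int32_t *vals,
                 size_t n) {
  for (size_t i = 0; i < n;) {
    size_t vl = vsetvl_e32m1(n - i);                  /* assumed helper */
    vuint32m1_t bindex = vle32_v_u32m1(offs + i, vl); /* byte offsets */
    vint32m1_t value = vle32_v_i32m1(vals + i, vl);
    vsuxei32_v_i32m1(base, bindex, value, vl);  /* (base, bindex, value, vl) */
    i += vl;
  }
}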
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf8(int8_t *base, vuint8mf8_t bindex, vint8mf8_t value,
+                          size_t vl) {
+  return vsuxei8_v_i8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf4(int8_t *base, vuint8mf4_t bindex, vint8mf4_t value,
+                          size_t vl) {
+  return vsuxei8_v_i8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8mf2(int8_t *base, vuint8mf2_t bindex, vint8mf2_t value,
+                          size_t vl) {
+  return vsuxei8_v_i8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m1(int8_t *base, vuint8m1_t bindex, vint8m1_t value,
+                         size_t vl) {
+  return vsuxei8_v_i8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m2(int8_t *base, vuint8m2_t bindex, vint8m2_t value,
+                         size_t vl) {
+  return vsuxei8_v_i8m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m4(int8_t *base, vuint8m4_t bindex, vint8m4_t value,
+                         size_t vl) {
+  return vsuxei8_v_i8m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i8m8(int8_t *base, vuint8m8_t bindex, vint8m8_t value,
+                         size_t vl) {
+  return vsuxei8_v_i8m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8mf8(int8_t *base, vuint16mf4_t bindex, vint8mf8_t value,
+                           size_t vl) {
+  return vsuxei16_v_i8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i8mf4(int8_t *base, vuint16mf2_t bindex, vint8mf4_t value,
+                           size_t vl) {
+  return vsuxei16_v_i8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32
[[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8mf2(int8_t *base, vuint16m1_t bindex, vint8mf2_t value, + size_t vl) { + return vsuxei16_v_i8mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8m1(int8_t *base, vuint16m2_t bindex, vint8m1_t value, + size_t vl) { + return vsuxei16_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8m2(int8_t *base, vuint16m4_t bindex, vint8m2_t value, + size_t vl) { + return vsuxei16_v_i8m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i8m4(int8_t *base, vuint16m8_t bindex, vint8m4_t value, + size_t vl) { + return vsuxei16_v_i8m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf8(int8_t *base, vuint32mf2_t bindex, vint8mf8_t value, + size_t vl) { + return vsuxei32_v_i8mf8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vsuxei32_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf4(int8_t *base, vuint32m1_t bindex, vint8mf4_t value, + size_t vl) { + return vsuxei32_v_i8mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8mf2(int8_t *base, vuint32m2_t bindex, vint8mf2_t value, + size_t vl) { + return vsuxei32_v_i8mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8m1(int8_t *base, vuint32m4_t bindex, vint8m1_t value, + size_t vl) { + return vsuxei32_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8m2(int8_t *base, vuint32m8_t bindex, vint8m2_t value, + size_t vl) { + return vsuxei32_v_i8m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], 
[[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf8(int8_t *base, vuint64m1_t bindex, vint8mf8_t value, + size_t vl) { + return vsuxei64_v_i8mf8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf4(int8_t *base, vuint64m2_t bindex, vint8mf4_t value, + size_t vl) { + return vsuxei64_v_i8mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf2(int8_t *base, vuint64m4_t bindex, vint8mf2_t value, + size_t vl) { + return vsuxei64_v_i8mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8m1(int8_t *base, vuint64m8_t bindex, vint8m1_t value, + size_t vl) { + return vsuxei64_v_i8m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf4(int16_t *base, vuint8mf8_t bindex, vint16mf4_t value, + size_t vl) { + return vsuxei8_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// 
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf2(int16_t *base, vuint8mf4_t bindex, vint16mf2_t value, + size_t vl) { + return vsuxei8_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m1(int16_t *base, vuint8mf2_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei8_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m2(int16_t *base, vuint8m1_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei8_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m4(int16_t *base, vuint8m2_t bindex, vint16m4_t value, + size_t vl) { + return vsuxei8_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m8(int16_t *base, vuint8m4_t bindex, vint16m8_t value, + size_t vl) { + return vsuxei8_v_i16m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf4( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf4(int16_t *base, vuint16mf4_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei16_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf2(int16_t *base, vuint16mf2_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei16_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m1(int16_t *base, vuint16m1_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei16_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m2(int16_t *base, vuint16m2_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei16_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * 
[[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m4(int16_t *base, vuint16m4_t bindex, vint16m4_t value, + size_t vl) { + return vsuxei16_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m8(int16_t *base, vuint16m8_t bindex, vint16m8_t value, + size_t vl) { + return vsuxei16_v_i16m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf4(int16_t *base, vuint32mf2_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei32_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf2(int16_t *base, vuint32m1_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei32_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m1(int16_t *base, vuint32m2_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei32_v_i16m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 
[[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m2(int16_t *base, vuint32m4_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei32_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m4(int16_t *base, vuint32m8_t bindex, vint16m4_t value, + size_t vl) { + return vsuxei32_v_i16m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf4(int16_t *base, vuint64m1_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei64_v_i16mf4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf2(int16_t *base, vuint64m2_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei64_v_i16mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m1(int16_t *base, vuint64m4_t bindex, vint16m1_t value, + size_t vl) { + return vsuxei64_v_i16m1(base, bindex, 
value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m2(int16_t *base, vuint64m8_t bindex, vint16m2_t value, + size_t vl) { + return vsuxei64_v_i16m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32mf2(int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, + size_t vl) { + return vsuxei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m1(int32_t *base, vuint8mf4_t bindex, vint32m1_t value, + size_t vl) { + return vsuxei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m2(int32_t *base, vuint8mf2_t bindex, vint32m2_t value, + size_t vl) { + return vsuxei8_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void 
@llvm.riscv.vsuxei.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m4(int32_t *base, vuint8m1_t bindex, vint32m4_t value, + size_t vl) { + return vsuxei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m8(int32_t *base, vuint8m2_t bindex, vint32m8_t value, + size_t vl) { + return vsuxei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32mf2(int32_t *base, vuint16mf4_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei16_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m1(int32_t *base, vuint16mf2_t bindex, vint32m1_t value, + size_t vl) { + return vsuxei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m2(int32_t *base, vuint16m1_t bindex, vint32m2_t value, + size_t vl) { + return vsuxei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i16.i32( 
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i32m8(int32_t *base, vuint16m4_t bindex, vint32m8_t value,
+                           size_t vl) {
+  return vsuxei16_v_i32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i32mf2(int32_t *base, vuint32mf2_t bindex,
+                            vint32mf2_t value, size_t vl) {
+  return vsuxei32_v_i32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i32m1(int32_t *base, vuint32m1_t bindex, vint32m1_t value,
+                           size_t vl) {
+  return vsuxei32_v_i32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i32m2(int32_t *base, vuint32m2_t bindex, vint32m2_t value,
+                           size_t vl) {
+  return vsuxei32_v_i32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i32m4(int32_t *base, vuint32m4_t bindex, vint32m4_t value,
+                           size_t vl) {
+  return vsuxei32_v_i32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i32m8(int32_t *base, vuint32m8_t bindex, vint32m8_t value,
+                           size_t vl) {
+  return vsuxei32_v_i32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i32mf2(int32_t *base, vuint64m1_t bindex,
+                            vint32mf2_t value, size_t vl) {
+  return vsuxei64_v_i32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i32m1(int32_t *base, vuint64m2_t bindex, vint32m1_t value,
+                           size_t vl) {
+  return vsuxei64_v_i32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i32m2(int32_t *base, vuint64m4_t bindex, vint32m2_t value,
+                           size_t vl) {
+  return vsuxei64_v_i32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i32m4(int32_t *base, vuint64m8_t bindex, vint32m4_t value,
+                           size_t vl) {
+  return vsuxei64_v_i32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i64m1(int64_t *base, vuint8mf8_t bindex, vint64m1_t value,
+                          size_t vl) {
+  return vsuxei8_v_i64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i64m2(int64_t *base, vuint8mf4_t bindex, vint64m2_t value,
+                          size_t vl) {
+  return vsuxei8_v_i64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i64m4(int64_t *base, vuint8mf2_t bindex, vint64m4_t value,
+                          size_t vl) {
+  return vsuxei8_v_i64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_i64m8(int64_t *base, vuint8m1_t bindex, vint64m8_t value,
+                          size_t vl) {
+  return vsuxei8_v_i64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m1(int64_t *base, vuint16mf4_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsuxei16_v_i64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m2(int64_t *base, vuint16mf2_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsuxei16_v_i64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m4(int64_t *base, vuint16m1_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsuxei16_v_i64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_i64m8(int64_t *base, vuint16m2_t bindex, vint64m8_t value,
+                           size_t vl) {
+  return vsuxei16_v_i64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m1(int64_t *base, vuint32mf2_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsuxei32_v_i64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m2(int64_t *base, vuint32m1_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsuxei32_v_i64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m4(int64_t *base, vuint32m2_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsuxei32_v_i64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_i64m8(int64_t *base, vuint32m4_t bindex, vint64m8_t value,
+                           size_t vl) {
+  return vsuxei32_v_i64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m1(int64_t *base, vuint64m1_t bindex, vint64m1_t value,
+                           size_t vl) {
+  return vsuxei64_v_i64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m2(int64_t *base, vuint64m2_t bindex, vint64m2_t value,
+                           size_t vl) {
+  return vsuxei64_v_i64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m4(int64_t *base, vuint64m4_t bindex, vint64m4_t value,
+                           size_t vl) {
+  return vsuxei64_v_i64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m8(int64_t *base, vuint64m8_t bindex, vint64m8_t value,
+                           size_t vl) {
+  return vsuxei64_v_i64m8(base, bindex, value, vl);
+}
+
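+// Editor's note: every test in this file has the same shape, so one hedged
+// usage sketch may help readers skimming the generated checks. The helper
+// below is illustrative only (it is not part of the committed test). It
+// scatters value[i] to the byte address base + bindex[i] for the first vl
+// lanes, using the vsuxei64_v_i64m1 intrinsic exercised above; per the RVV
+// spec the indices are byte offsets, and the "u" in vsuxei means the element
+// stores may be performed in any order.
+static inline void scatter_i64(int64_t *base, vuint64m1_t bindex,
+                               vint64m1_t value, size_t vl) {
+  vsuxei64_v_i64m1(base, bindex, value, vl);
+}
+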
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf8(uint8_t *base, vuint8mf8_t bindex, vuint8mf8_t value,
+                          size_t vl) {
+  return vsuxei8_v_u8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf4(uint8_t *base, vuint8mf4_t bindex, vuint8mf4_t value,
+                          size_t vl) {
+  return vsuxei8_v_u8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf2(uint8_t *base, vuint8mf2_t bindex, vuint8mf2_t value,
+                          size_t vl) {
+  return vsuxei8_v_u8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m1(uint8_t *base, vuint8m1_t bindex, vuint8m1_t value,
+                         size_t vl) {
+  return vsuxei8_v_u8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m2(uint8_t *base, vuint8m2_t bindex, vuint8m2_t value,
+                         size_t vl) {
+  return vsuxei8_v_u8m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m4(uint8_t *base, vuint8m4_t bindex, vuint8m4_t value,
+                         size_t vl) {
+  return vsuxei8_v_u8m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m8(uint8_t *base, vuint8m8_t bindex, vuint8m8_t value,
+                         size_t vl) {
+  return vsuxei8_v_u8m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8mf8(uint8_t *base, vuint16mf4_t bindex,
+                           vuint8mf8_t value, size_t vl) {
+  return vsuxei16_v_u8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8mf4(uint8_t *base, vuint16mf2_t bindex,
+                           vuint8mf4_t value, size_t vl) {
+  return vsuxei16_v_u8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8mf2(uint8_t *base, vuint16m1_t bindex, vuint8mf2_t value,
+                           size_t vl) {
+  return vsuxei16_v_u8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8m1(uint8_t *base, vuint16m2_t bindex, vuint8m1_t value,
+                          size_t vl) {
+  return vsuxei16_v_u8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8m2(uint8_t *base, vuint16m4_t bindex, vuint8m2_t value,
+                          size_t vl) {
+  return vsuxei16_v_u8m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8m4(uint8_t *base, vuint16m8_t bindex, vuint8m4_t value,
+                          size_t vl) {
+  return vsuxei16_v_u8m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8mf8(uint8_t *base, vuint32mf2_t bindex,
+                           vuint8mf8_t value, size_t vl) {
+  return vsuxei32_v_u8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8mf4(uint8_t *base, vuint32m1_t bindex, vuint8mf4_t value,
+                           size_t vl) {
+  return vsuxei32_v_u8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8mf2(uint8_t *base, vuint32m2_t bindex, vuint8mf2_t value,
+                           size_t vl) {
+  return vsuxei32_v_u8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8m1(uint8_t *base, vuint32m4_t bindex, vuint8m1_t value,
+                          size_t vl) {
+  return vsuxei32_v_u8m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i8.nxv16i32.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8m2(uint8_t *base, vuint32m8_t bindex, vuint8m2_t value,
+                          size_t vl) {
+  return vsuxei32_v_u8m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i8.nxv1i64.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u8mf8(uint8_t *base, vuint64m1_t bindex, vuint8mf8_t value,
+                           size_t vl) {
+  return vsuxei64_v_u8mf8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i8.nxv2i64.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u8mf4(uint8_t *base, vuint64m2_t bindex, vuint8mf4_t value,
+                           size_t vl) {
+  return vsuxei64_v_u8mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i8.nxv4i64.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u8mf2(uint8_t *base, vuint64m4_t bindex, vuint8mf2_t value,
+                           size_t vl) {
+  return vsuxei64_v_u8mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u8m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u8m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i8.nxv8i64.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u8m1(uint8_t *base, vuint64m8_t bindex, vuint8m1_t value,
+                          size_t vl) {
+  return vsuxei64_v_u8m1(base, bindex, value, vl);
+}
+
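+// Editor's note: because the offsets are byte offsets, scattering to element
+// positions idx[i] of a uint16_t array requires scaling the indices first.
+// A hedged sketch, left commented out because it assumes the vsll_vx_u32m2
+// shift intrinsic from the full v-extension API, which this patch does not
+// itself add:
+//
+//   static inline void scatter_u16_at(uint16_t *base, vuint32m2_t idx,
+//                                     vuint16m1_t value, size_t vl) {
+//     // idx * sizeof(uint16_t): element index -> byte offset
+//     vuint32m2_t byte_off = vsll_vx_u32m2(idx, 1, vl);
+//     vsuxei32_v_u16m1(base, byte_off, value, vl);
+//   }
+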
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i8.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u16mf4(uint16_t *base, vuint8mf8_t bindex,
+                           vuint16mf4_t value, size_t vl) {
+  return vsuxei8_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i8.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u16mf2(uint16_t *base, vuint8mf4_t bindex,
+                           vuint16mf2_t value, size_t vl) {
+  return vsuxei8_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i8.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u16m1(uint16_t *base, vuint8mf2_t bindex, vuint16m1_t value,
+                          size_t vl) {
+  return vsuxei8_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i8.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u16m2(uint16_t *base, vuint8m1_t bindex, vuint16m2_t value,
+                          size_t vl) {
+  return vsuxei8_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i16.nxv16i8.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u16m4(uint16_t *base, vuint8m2_t bindex, vuint16m4_t value,
+                          size_t vl) {
+  return vsuxei8_v_u16m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv32i16.nxv32i8.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u16m8(uint16_t *base, vuint8m4_t bindex, vuint16m8_t value,
+                          size_t vl) {
+  return vsuxei8_v_u16m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u16mf4(uint16_t *base, vuint16mf4_t bindex,
+                            vuint16mf4_t value, size_t vl) {
+  return vsuxei16_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u16mf2(uint16_t *base, vuint16mf2_t bindex,
+                            vuint16mf2_t value, size_t vl) {
+  return vsuxei16_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u16m1(uint16_t *base, vuint16m1_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsuxei16_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u16m2(uint16_t *base, vuint16m2_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsuxei16_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u16m4(uint16_t *base, vuint16m4_t bindex,
+                           vuint16m4_t value, size_t vl) {
+  return vsuxei16_v_u16m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i32(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 32 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[VALUE:%.*]], <vscale x 32 x i16>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u16m8(uint16_t *base, vuint16m8_t bindex,
+                           vuint16m8_t value, size_t vl) {
+  return vsuxei16_v_u16m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i32.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u16mf4(uint16_t *base, vuint32mf2_t bindex,
+                            vuint16mf4_t value, size_t vl) {
+  return vsuxei32_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i32.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u16mf2(uint16_t *base, vuint32m1_t bindex,
+                            vuint16mf2_t value, size_t vl) {
+  return vsuxei32_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i32.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u16m1(uint16_t *base, vuint32m2_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsuxei32_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i32.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u16m2(uint16_t *base, vuint32m4_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsuxei32_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i32(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 16 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i16.nxv16i32.i64(<vscale x 16 x i16> [[VALUE:%.*]], <vscale x 16 x i16>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u16m4(uint16_t *base, vuint32m8_t bindex,
+                           vuint16m4_t value, size_t vl) {
+  return vsuxei32_v_u16m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i32(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 1 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i16.nxv1i64.i64(<vscale x 1 x i16> [[VALUE:%.*]], <vscale x 1 x i16>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u16mf4(uint16_t *base, vuint64m1_t bindex,
+                            vuint16mf4_t value, size_t vl) {
+  return vsuxei64_v_u16mf4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i32(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 2 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i16.nxv2i64.i64(<vscale x 2 x i16> [[VALUE:%.*]], <vscale x 2 x i16>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u16mf2(uint16_t *base, vuint64m2_t bindex,
+                            vuint16mf2_t value, size_t vl) {
+  return vsuxei64_v_u16mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i32(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 4 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i16.nxv4i64.i64(<vscale x 4 x i16> [[VALUE:%.*]], <vscale x 4 x i16>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u16m1(uint16_t *base, vuint64m4_t bindex,
+                           vuint16m1_t value, size_t vl) {
+  return vsuxei64_v_u16m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i32(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to <vscale x 8 x i16>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i16.nxv8i64.i64(<vscale x 8 x i16> [[VALUE:%.*]], <vscale x 8 x i16>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u16m2(uint16_t *base, vuint64m8_t bindex,
+                           vuint16m2_t value, size_t vl) {
+  return vsuxei64_v_u16m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i8.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u32mf2(uint32_t *base, vuint8mf8_t bindex,
+                           vuint32mf2_t value, size_t vl) {
+  return vsuxei8_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i8.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u32m1(uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value,
+                          size_t vl) {
+  return vsuxei8_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i8.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u32m2(uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value,
+                          size_t vl) {
+  return vsuxei8_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i8.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i8.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, vuint32m4_t value,
+                          size_t vl) {
+  return vsuxei8_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i8.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, vuint32m8_t value,
+                          size_t vl) {
+  return vsuxei8_v_u32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv1i32.nxv1i16.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32mf2(uint32_t *base, vuint16mf4_t bindex,
+                            vuint32mf2_t value, size_t vl) {
+  return vsuxei16_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv2i32.nxv2i16.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32m1(uint32_t *base, vuint16mf2_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsuxei16_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32m2(uint32_t *base, vuint16m1_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsuxei16_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i16.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32m4(uint32_t *base, vuint16m2_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsuxei16_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsuxei16_v_u32m8(base, bindex, value, vl);
+}
+
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_u32m8(uint32_t *base, vuint16m4_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsuxei16_v_u32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u32mf2(uint32_t *base, vuint32mf2_t bindex,
+                            vuint32mf2_t value, size_t vl) {
+  return vsuxei32_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u32m1(uint32_t *base, vuint32m1_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsuxei32_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u32m2(uint32_t *base, vuint32m2_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsuxei32_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u32m4(uint32_t *base, vuint32m4_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsuxei32_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u32m8(uint32_t *base, vuint32m8_t bindex,
+                           vuint32m8_t value, size_t vl) {
+  return vsuxei32_v_u32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u32mf2(uint32_t *base, vuint64m1_t bindex,
+                            vuint32mf2_t value, size_t vl) {
+  return vsuxei64_v_u32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u32m1(uint32_t *base, vuint64m2_t bindex,
+                           vuint32m1_t value, size_t vl) {
+  return vsuxei64_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u32m2(uint32_t *base, vuint64m4_t bindex,
+                           vuint32m2_t value, size_t vl) {
+  return vsuxei64_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u32m4(uint32_t *base, vuint64m8_t bindex,
+                           vuint32m4_t value, size_t vl) {
+  return vsuxei64_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_u64m1(uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value,
+                          size_t vl) {
+  return vsuxei8_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_u64m2(uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value,
+                          size_t vl) {
+  return vsuxei8_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_u64m4(uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value,
+                          size_t vl) {
+  return vsuxei8_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, vuint64m8_t value,
+                          size_t vl) {
+  return vsuxei8_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_u64m1(uint64_t *base, vuint16mf4_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsuxei16_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_u64m2(uint64_t *base, vuint16mf2_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsuxei16_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_u64m4(uint64_t *base, vuint16m1_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsuxei16_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_u64m8(uint64_t *base, vuint16m2_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsuxei16_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u64m1(uint64_t *base, vuint32mf2_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsuxei32_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u64m2(uint64_t *base, vuint32m1_t bindex,
+                           vuint64m2_t value, size_t vl) {
+  return vsuxei32_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u64m4(uint64_t *base, vuint32m2_t bindex,
+                           vuint64m4_t value, size_t vl) {
+  return vsuxei32_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_u64m8(uint64_t *base, vuint32m4_t bindex,
+                           vuint64m8_t value, size_t vl) {
+  return vsuxei32_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u64m1(uint64_t *base, vuint64m1_t bindex,
+                           vuint64m1_t value, size_t vl) {
+  return vsuxei64_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u64m2(uint64_t *base, vuint64m2_t bindex, vuint64m2_t value,
+                           size_t vl) {
+  return vsuxei64_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u64m4(uint64_t *base, vuint64m4_t bindex, vuint64m4_t value,
+                           size_t vl) {
+  return vsuxei64_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_u64m8(uint64_t *base, vuint64m8_t bindex, vuint64m8_t value,
+                           size_t vl) {
+  return vsuxei64_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i8.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f32mf2(float *base, vuint8mf8_t bindex, vfloat32mf2_t value,
+                           size_t vl) {
+  return vsuxei8_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i8.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f32m1(float *base, vuint8mf4_t bindex, vfloat32m1_t value,
+                          size_t vl) {
+  return vsuxei8_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i8.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i8.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f32m2(float *base, vuint8mf2_t bindex, vfloat32m2_t value,
+                          size_t vl) {
+  return vsuxei8_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i8.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i8.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f32m4(float *base, vuint8m1_t bindex, vfloat32m4_t value,
+                          size_t vl) {
+  return vsuxei8_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i8.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i8.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f32m8(float *base, vuint8m2_t bindex, vfloat32m8_t value,
+                          size_t vl) {
+  return vsuxei8_v_f32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i16.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i16.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f32mf2(float *base, vuint16mf4_t bindex,
+                            vfloat32mf2_t value, size_t vl) {
+  return vsuxei16_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i16.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f32m1(float *base, vuint16mf2_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsuxei16_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i16.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f32m2(float *base, vuint16m1_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsuxei16_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i16.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f32m4(float *base, vuint16m2_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsuxei16_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i16.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f32m8(float *base, vuint16m4_t bindex, vfloat32m8_t value,
+                           size_t vl) {
+  return vsuxei16_v_f32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i32.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f32mf2(float *base, vuint32mf2_t bindex,
+                            vfloat32mf2_t value, size_t vl) {
+  return vsuxei32_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i32.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f32m1(float *base, vuint32m1_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsuxei32_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i32.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f32m2(float *base, vuint32m2_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsuxei32_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i32.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f32m4(float *base, vuint32m4_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsuxei32_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i32.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv16f32.nxv16i32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f32m8(float *base, vuint32m8_t bindex, vfloat32m8_t value,
+                           size_t vl) {
+  return vsuxei32_v_f32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i64.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f32mf2(float *base, vuint64m1_t bindex,
+                            vfloat32mf2_t value, size_t vl) {
+  return vsuxei64_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i64.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f32m1(float *base, vuint64m2_t bindex, vfloat32m1_t value,
+                           size_t vl) {
+  return vsuxei64_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i64.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f32m2(float *base, vuint64m4_t bindex, vfloat32m2_t value,
+                           size_t vl) {
+  return vsuxei64_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i64.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f32m4(float *base, vuint64m8_t bindex, vfloat32m4_t value,
+                           size_t vl) {
+  return vsuxei64_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i8.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f64m1(double *base, vuint8mf8_t bindex, vfloat64m1_t value,
+                          size_t vl) {
+  return vsuxei8_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i8.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f64m2(double *base, vuint8mf4_t bindex, vfloat64m2_t value,
+                          size_t vl) {
+  return vsuxei8_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i8.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f64m4(double *base, vuint8mf2_t bindex, vfloat64m4_t value,
+                          size_t vl) {
+  return vsuxei8_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i8.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i8.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_f64m8(double *base, vuint8m1_t bindex, vfloat64m8_t value,
+                          size_t vl) {
+  return vsuxei8_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i16.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f64m1(double *base, vuint16mf4_t bindex,
+                           vfloat64m1_t value, size_t vl) {
+  return vsuxei16_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i16.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f64m2(double *base, vuint16mf2_t bindex,
+                           vfloat64m2_t value, size_t vl) {
+  return vsuxei16_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i16.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f64m4(double *base, vuint16m1_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsuxei16_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i16.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_f64m8(double *base, vuint16m2_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsuxei16_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i32.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f64m1(double *base, vuint32mf2_t bindex,
+                           vfloat64m1_t value, size_t vl) {
+  return vsuxei32_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i32.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f64m2(double *base, vuint32m1_t bindex, vfloat64m2_t value,
+                           size_t vl) {
+  return vsuxei32_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i32.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f64m4(double *base, vuint32m2_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsuxei32_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i32.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_f64m8(double *base, vuint32m4_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsuxei32_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f64m1(double *base, vuint64m1_t bindex, vfloat64m1_t value,
+                           size_t vl) {
+  return vsuxei64_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f64m2(double *base, vuint64m2_t bindex, vfloat64m2_t value,
+                           size_t vl) {
+  return vsuxei64_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f64m4(double *base, vuint64m4_t bindex, vfloat64m4_t value,
+                           size_t vl) {
+  return vsuxei64_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei64_v_f64m8(double *base, vuint64m8_t bindex, vfloat64m8_t value,
+                           size_t vl) {
+  return vsuxei64_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint8mf8_t bindex,
+                            vint8mf8_t value, size_t vl) {
+  return vsuxei8_v_i8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint8mf4_t bindex,
+                            vint8mf4_t value, size_t vl) {
+  return vsuxei8_v_i8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8mf2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8mf2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint8mf2_t bindex,
+                            vint8mf2_t value, size_t vl) {
+  return vsuxei8_v_i8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_i8m1_m(vbool8_t mask, int8_t *base, vuint8m1_t bindex,
+                           vint8m1_t value, size_t vl) {
+  return vsuxei8_v_i8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_i8m2_m(vbool4_t mask, int8_t *base, vuint8m2_t bindex,
+                           vint8m2_t value, size_t vl) {
+  return vsuxei8_v_i8m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_i8m4_m(vbool2_t mask, int8_t *base, vuint8m4_t bindex,
+                           vint8m4_t value, size_t vl) {
+  return vsuxei8_v_i8m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_i8m8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_i8m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei8_v_i8m8_m(vbool1_t mask, int8_t *base, vuint8m8_t bindex,
+                           vint8m8_t value, size_t vl) {
+  return vsuxei8_v_i8m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint16mf4_t bindex,
+                             vint8mf8_t value, size_t vl) {
+  return vsuxei16_v_i8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint16mf2_t bindex,
+                             vint8mf4_t value, size_t vl) {
+  return vsuxei16_v_i8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8mf2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8mf2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint16m1_t bindex,
+                             vint8mf2_t value, size_t vl) {
+  return vsuxei16_v_i8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_i8m1_m(vbool8_t mask, int8_t *base, vuint16m2_t bindex,
+                            vint8m1_t value, size_t vl) {
+  return vsuxei16_v_i8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_i8m2_m(vbool4_t mask, int8_t *base, vuint16m4_t bindex,
+                            vint8m2_t value, size_t vl) {
+  return vsuxei16_v_i8m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_i8m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_i8m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei16_v_i8m4_m(vbool2_t mask, int8_t *base, vuint16m8_t bindex,
+                            vint8m4_t value, size_t vl) {
+  return vsuxei16_v_i8m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint32mf2_t bindex,
+                             vint8mf8_t value, size_t vl) {
+  return vsuxei32_v_i8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint32m1_t bindex,
+                             vint8mf4_t value, size_t vl) {
+  return vsuxei32_v_i8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i8mf2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i8mf2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint32m2_t bindex,
+                             vint8mf2_t value, size_t vl) {
+  return vsuxei32_v_i8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void
+//
+void test_vsuxei32_v_i8m1_m(vbool8_t mask, int8_t *base, vuint32m4_t bindex,
bindex, + vint8m1_t value, size_t vl) { + return vsuxei32_v_i8m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i8m2_m(vbool4_t mask, int8_t *base, vuint32m8_t bindex, + vint8m2_t value, size_t vl) { + return vsuxei32_v_i8m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf8_m(vbool64_t mask, int8_t *base, vuint64m1_t bindex, + vint8mf8_t value, size_t vl) { + return vsuxei64_v_i8mf8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf4_m(vbool32_t mask, int8_t *base, vuint64m2_t bindex, + vint8mf4_t value, size_t vl) { + return vsuxei64_v_i8mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8mf2_m(vbool16_t mask, int8_t *base, vuint64m4_t bindex, + vint8mf2_t value, size_t vl) { + return vsuxei64_v_i8mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i8m1_m(vbool8_t mask, int8_t *base, vuint64m8_t bindex, + vint8m1_t value, size_t vl) { + return vsuxei64_v_i8m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint8mf8_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei8_v_i16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint8mf4_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei8_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m1_m(vbool16_t mask, int16_t *base, vuint8mf2_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei8_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m2_m( +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m2_m(vbool8_t mask, int16_t *base, vuint8m1_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei8_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m4_m(vbool4_t mask, int16_t *base, vuint8m2_t bindex, + vint16m4_t value, size_t vl) { + return vsuxei8_v_i16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i16m8_m(vbool2_t mask, int16_t *base, vuint8m4_t bindex, + vint16m8_t value, size_t vl) { + return vsuxei8_v_i16m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint16mf4_t bindex, vint16mf4_t value, + size_t vl) { + return vsuxei16_v_i16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16mf2_m(vbool32_t mask, int16_t *base, + vuint16mf2_t bindex, vint16mf2_t value, + size_t vl) { + return vsuxei16_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m1_m(vbool16_t mask, int16_t *base, vuint16m1_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei16_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m2_m(vbool8_t mask, int16_t *base, vuint16m2_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei16_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m4_m(vbool4_t mask, int16_t *base, vuint16m4_t bindex, + vint16m4_t value, size_t vl) { + return vsuxei16_v_i16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i16m8_m(vbool2_t mask, int16_t *base, vuint16m8_t bindex, + vint16m8_t value, size_t vl) { + return vsuxei16_v_i16m8_m(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf4_m(vbool64_t mask, int16_t *base, + vuint32mf2_t bindex, vint16mf4_t value, + size_t vl) { + return vsuxei32_v_i16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint32m1_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei32_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m1_m(vbool16_t mask, int16_t *base, vuint32m2_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei32_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m2_m(vbool8_t mask, int16_t *base, vuint32m4_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei32_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i32( 
[[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i16m4_m(vbool4_t mask, int16_t *base, vuint32m8_t bindex, + vint16m4_t value, size_t vl) { + return vsuxei32_v_i16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf4_m(vbool64_t mask, int16_t *base, vuint64m1_t bindex, + vint16mf4_t value, size_t vl) { + return vsuxei64_v_i16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16mf2_m(vbool32_t mask, int16_t *base, vuint64m2_t bindex, + vint16mf2_t value, size_t vl) { + return vsuxei64_v_i16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m1_m(vbool16_t mask, int16_t *base, vuint64m4_t bindex, + vint16m1_t value, size_t vl) { + return vsuxei64_v_i16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i16m2_m(vbool8_t mask, int16_t *base, vuint64m8_t bindex, + vint16m2_t value, size_t vl) { + return vsuxei64_v_i16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint8mf8_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m1_m(vbool32_t mask, int32_t *base, vuint8mf4_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m2_m(vbool16_t mask, int32_t *base, vuint8mf2_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void 
test_vsuxei8_v_i32m4_m(vbool8_t mask, int32_t *base, vuint8m1_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i32m8_m(vbool4_t mask, int32_t *base, vuint8m2_t bindex, + vint32m8_t value, size_t vl) { + return vsuxei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint16mf4_t bindex, vint32mf2_t value, + size_t vl) { + return vsuxei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m1_m(vbool32_t mask, int32_t *base, vuint16mf2_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m2_m(vbool16_t mask, int32_t *base, vuint16m1_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m4_m( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m4_m(vbool8_t mask, int32_t *base, vuint16m2_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i32m8_m(vbool4_t mask, int32_t *base, vuint16m4_t bindex, + vint32m8_t value, size_t vl) { + return vsuxei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint32mf2_t bindex, vint32mf2_t value, + size_t vl) { + return vsuxei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m1_m(vbool32_t mask, int32_t *base, vuint32m1_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], 
[[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m2_m(vbool16_t mask, int32_t *base, vuint32m2_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m4_m(vbool8_t mask, int32_t *base, vuint32m4_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i32m8_m(vbool4_t mask, int32_t *base, vuint32m8_t bindex, + vint32m8_t value, size_t vl) { + return vsuxei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32mf2_m(vbool64_t mask, int32_t *base, vuint64m1_t bindex, + vint32mf2_t value, size_t vl) { + return vsuxei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m1_m(vbool32_t mask, int32_t *base, vuint64m2_t bindex, + vint32m1_t value, size_t vl) { + return vsuxei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m2_m(vbool16_t mask, int32_t *base, vuint64m4_t bindex, + vint32m2_t value, size_t vl) { + return vsuxei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i32m4_m(vbool8_t mask, int32_t *base, vuint64m8_t bindex, + vint32m4_t value, size_t vl) { + return vsuxei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m1_m(vbool64_t mask, int64_t *base, vuint8mf8_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m2_m(vbool32_t mask, int64_t *base, 
vuint8mf4_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m4_m(vbool16_t mask, int64_t *base, vuint8mf2_t bindex, + vint64m4_t value, size_t vl) { + return vsuxei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_i64m8_m(vbool8_t mask, int64_t *base, vuint8m1_t bindex, + vint64m8_t value, size_t vl) { + return vsuxei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m1_m(vbool64_t mask, int64_t *base, vuint16mf4_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m2_m(vbool32_t mask, int64_t *base, vuint16mf2_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m4_m(vbool16_t mask, int64_t *base, vuint16m1_t bindex, + vint64m4_t value, size_t vl) { + return vsuxei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_i64m8_m(vbool8_t mask, int64_t *base, vuint16m2_t bindex, + vint64m8_t value, size_t vl) { + return vsuxei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m1_m(vbool64_t mask, int64_t *base, vuint32mf2_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m2_m(vbool32_t mask, int64_t *base, vuint32m1_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: 
@test_vsuxei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m4_m(vbool16_t mask, int64_t *base, vuint32m2_t bindex, + vint64m4_t value, size_t vl) { + return vsuxei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_i64m8_m(vbool8_t mask, int64_t *base, vuint32m4_t bindex, + vint64m8_t value, size_t vl) { + return vsuxei32_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i64m1_m(vbool64_t mask, int64_t *base, vuint64m1_t bindex, + vint64m1_t value, size_t vl) { + return vsuxei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_i64m2_m(vbool32_t mask, int64_t *base, vuint64m2_t bindex, + vint64m2_t value, size_t vl) { + return vsuxei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], 
<vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m4_m(vbool16_t mask, int64_t *base, vuint64m4_t bindex,
+                             vint64m4_t value, size_t vl) {
+  return vsuxei64_v_i64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_i64m8_m(vbool8_t mask, int64_t *base, vuint64m8_t bindex,
+                             vint64m8_t value, size_t vl) {
+  return vsuxei64_v_i64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint8mf8_t bindex,
+                            vuint8mf8_t value, size_t vl) {
+  return vsuxei8_v_u8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint8mf4_t bindex,
+                            vuint8mf4_t value, size_t vl) {
+  return vsuxei8_v_u8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint8mf2_t bindex,
+                            vuint8mf2_t value, size_t vl) {
+  return vsuxei8_v_u8mf2_m(mask, base, bindex, value, vl);
+}
+
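+// For reference, a scalar model of the masked indexed-store semantics these
+// tests exercise (an illustrative sketch only, not part of the generated
+// checks): bindex supplies byte offsets from base, so for the u8 cases below,
+// where element offset and byte offset coincide, the active-lane behavior is
+//
+//   for (size_t i = 0; i < vl; ++i)
+//     if (mask[i])
+//       base[bindex[i]] = value[i];
+//
+// vsuxei leaves the per-element stores unordered; vsoxei is the ordered
+// variant with otherwise identical operands.
+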
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint8m1_t bindex,
+                           vuint8m1_t value, size_t vl) {
+  return vsuxei8_v_u8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint8m2_t bindex,
+                           vuint8m2_t value, size_t vl) {
+  return vsuxei8_v_u8m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i8> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint8m4_t bindex,
+                           vuint8m4_t value, size_t vl) {
+  return vsuxei8_v_u8m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u8m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i32(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u8m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 64 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[VALUE:%.*]], <vscale x 64 x i8>* [[TMP0]], <vscale x 64 x i8> [[BINDEX:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u8m8_m(vbool1_t mask, uint8_t *base, vuint8m8_t bindex,
+                           vuint8m8_t value, size_t vl) {
+  return vsuxei8_v_u8m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i16.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint16mf4_t bindex,
+                             vuint8mf8_t value, size_t vl) {
+  return vsuxei16_v_u8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i16.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint16mf2_t bindex,
+                             vuint8mf4_t value, size_t vl) {
+  return vsuxei16_v_u8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i16.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint16m1_t bindex,
+                             vuint8mf2_t value, size_t vl) {
+  return vsuxei16_v_u8mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i32(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 8 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i16.i64(<vscale x 8 x i8> [[VALUE:%.*]], <vscale x 8 x i8>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint16m2_t bindex,
+                            vuint8m1_t value, size_t vl) {
+  return vsuxei16_v_u8m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i32(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 16 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i16.i64(<vscale x 16 x i8> [[VALUE:%.*]], <vscale x 16 x i8>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint16m4_t bindex,
+                            vuint8m2_t value, size_t vl) {
+  return vsuxei16_v_u8m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u8m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i32(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u8m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 32 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv32i8.nxv32i16.i64(<vscale x 32 x i8> [[VALUE:%.*]], <vscale x 32 x i8>* [[TMP0]], <vscale x 32 x i16> [[BINDEX:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u8m4_m(vbool2_t mask, uint8_t *base, vuint16m8_t bindex,
+                            vuint8m4_t value, size_t vl) {
+  return vsuxei16_v_u8m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i32(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 1 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i32.i64(<vscale x 1 x i8> [[VALUE:%.*]], <vscale x 1 x i8>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint32mf2_t bindex,
+                             vuint8mf8_t value, size_t vl) {
+  return vsuxei32_v_u8mf8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i32(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 2 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i32.i64(<vscale x 2 x i8> [[VALUE:%.*]], <vscale x 2 x i8>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint32m1_t bindex,
+                             vuint8mf4_t value, size_t vl) {
+  return vsuxei32_v_u8mf4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u8mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i32(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u8mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to <vscale x 4 x i8>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i32.i64(<vscale x 4 x i8> [[VALUE:%.*]], <vscale x 4 x i8>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint32m2_t bindex,
+                             vuint8mf2_t
value, size_t vl) { + return vsuxei32_v_u8mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint32m4_t bindex, + vuint8m1_t value, size_t vl) { + return vsuxei32_v_u8m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u8m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u8m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i8.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u8m2_m(vbool4_t mask, uint8_t *base, vuint32m8_t bindex, + vuint8m2_t value, size_t vl) { + return vsuxei32_v_u8m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i8.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf8_m(vbool64_t mask, uint8_t *base, vuint64m1_t bindex, + vuint8mf8_t value, size_t vl) { + return vsuxei64_v_u8mf8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i8.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf4_m(vbool32_t mask, uint8_t *base, vuint64m2_t bindex, + vuint8mf4_t value, size_t vl) { + return vsuxei64_v_u8mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: 
call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i8.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8mf2_m(vbool16_t mask, uint8_t *base, vuint64m4_t bindex, + vuint8mf2_t value, size_t vl) { + return vsuxei64_v_u8mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u8m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u8m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i8* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i8.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u8m1_m(vbool8_t mask, uint8_t *base, vuint64m8_t bindex, + vuint8m1_t value, size_t vl) { + return vsuxei64_v_u8m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16mf4_m(vbool64_t mask, uint16_t *base, vuint8mf8_t bindex, + vuint16mf4_t value, size_t vl) { + return vsuxei8_v_u16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16mf2_m(vbool32_t mask, uint16_t *base, vuint8mf4_t bindex, + vuint16mf2_t value, size_t vl) { + return vsuxei8_v_u16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint8mf2_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei8_v_u16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint8m1_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei8_v_u16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint8m2_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei8_v_u16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint8m4_t bindex, + vuint16m8_t value, size_t vl) { + return vsuxei8_v_u16m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret 
void +// +void test_vsuxei16_v_u16mf4_m(vbool64_t mask, uint16_t *base, + vuint16mf4_t bindex, vuint16mf4_t value, + size_t vl) { + return vsuxei16_v_u16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16mf2_m(vbool32_t mask, uint16_t *base, + vuint16mf2_t bindex, vuint16mf2_t value, + size_t vl) { + return vsuxei16_v_u16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint16m1_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei16_v_u16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint16m2_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei16_v_u16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint16m4_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei16_v_u16m4_m(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vsuxei16_v_u16m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u16m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv32i16.nxv32i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u16m8_m(vbool2_t mask, uint16_t *base, vuint16m8_t bindex, + vuint16m8_t value, size_t vl) { + return vsuxei16_v_u16m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16mf4_m(vbool64_t mask, uint16_t *base, + vuint32mf2_t bindex, vuint16mf4_t value, + size_t vl) { + return vsuxei32_v_u16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16mf2_m(vbool32_t mask, uint16_t *base, + vuint32m1_t bindex, vuint16mf2_t value, + size_t vl) { + return vsuxei32_v_u16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint32m2_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei32_v_u16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void 
@llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint32m4_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei32_v_u16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei32_v_u16m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei32_v_u16m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i16.nxv16i32.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei32_v_u16m4_m(vbool4_t mask, uint16_t *base, vuint32m8_t bindex, + vuint16m4_t value, size_t vl) { + return vsuxei32_v_u16m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i16.nxv1i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16mf4_m(vbool64_t mask, uint16_t *base, + vuint64m1_t bindex, vuint16mf4_t value, + size_t vl) { + return vsuxei64_v_u16mf4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i16.nxv2i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16mf2_m(vbool32_t mask, uint16_t *base, + vuint64m2_t bindex, vuint16mf2_t value, + size_t vl) { + return vsuxei64_v_u16mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m1_m( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i16.nxv4i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16m1_m(vbool16_t mask, uint16_t *base, vuint64m4_t bindex, + vuint16m1_t value, size_t vl) { + return vsuxei64_v_u16m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei64_v_u16m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei64_v_u16m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i16* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i16.nxv8i64.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei64_v_u16m2_m(vbool8_t mask, uint16_t *base, vuint64m8_t bindex, + vuint16m2_t value, size_t vl) { + return vsuxei64_v_u16m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32mf2_m(vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, + vuint32mf2_t value, size_t vl) { + return vsuxei8_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, + vuint32m1_t value, size_t vl) { + return vsuxei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 
[[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, + vuint32m2_t value, size_t vl) { + return vsuxei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint8m1_t bindex, + vuint32m4_t value, size_t vl) { + return vsuxei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint8m2_t bindex, + vuint32m8_t value, size_t vl) { + return vsuxei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint16mf4_t bindex, vuint32mf2_t value, + size_t vl) { + return vsuxei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_u32m1_m(vbool32_t mask, uint32_t *base, + vuint16mf2_t bindex, vuint32m1_t value, + size_t vl) { + return vsuxei16_v_u32m1_m(mask, base, bindex, value, vl); +} 
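+
+// NOTE (editorial sketch, not emitted by update_cc_test_checks): every masked
+// test in this file follows the same pattern. vsuxeiN_v_<ty>_m is the masked,
+// unordered indexed store: for each element i < vl whose mask bit is set, it
+// stores value[i] at the byte address base + bindex[i]. A minimal usage
+// sketch, assuming hypothetical buffers dst/offsets/src of at least vl bytes:
+//
+//   vuint8m1_t off = vle8_v_u8m1(offsets, vl);  // byte offsets to scatter to
+//   vuint8m1_t val = vle8_v_u8m1(src, vl);      // payload to store
+//   vsuxei8_v_u8m1_m(mask, dst, off, val, vl);  // masked scatter into dst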
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint16m1_t bindex,
+                             vuint32m2_t value, size_t vl) {
+  return vsuxei16_v_u32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint16m2_t bindex,
+                             vuint32m4_t value, size_t vl) {
+  return vsuxei16_v_u32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint16m4_t bindex,
+                             vuint32m8_t value, size_t vl) {
+  return vsuxei16_v_u32m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32mf2_m(vbool64_t mask, uint32_t *base,
+                              vuint32mf2_t bindex, vuint32mf2_t value,
+                              size_t vl) {
+  return vsuxei32_v_u32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint32m1_t bindex,
+                             vuint32m1_t value, size_t vl) {
+  return vsuxei32_v_u32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint32m2_t bindex,
+                             vuint32m2_t value, size_t vl) {
+  return vsuxei32_v_u32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint32m4_t bindex,
+                             vuint32m4_t value, size_t vl) {
+  return vsuxei32_v_u32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i32(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u32m8_m(vbool4_t mask, uint32_t *base, vuint32m8_t bindex,
+                             vuint32m8_t value, size_t vl) {
+  return vsuxei32_v_u32m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i32(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32mf2_m(vbool64_t mask, uint32_t *base,
+                              vuint64m1_t bindex, vuint32mf2_t value,
+                              size_t vl) {
+  return vsuxei64_v_u32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i32(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32m1_m(vbool32_t mask, uint32_t *base, vuint64m2_t bindex,
+                             vuint32m1_t value, size_t vl) {
+  return vsuxei64_v_u32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i32(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32m2_m(vbool16_t mask, uint32_t *base, vuint64m4_t bindex,
+                             vuint32m2_t value, size_t vl) {
+  return vsuxei64_v_u32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i32(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u32m4_m(vbool8_t mask, uint32_t *base, vuint64m8_t bindex,
+                             vuint32m4_t value, size_t vl) {
+  return vsuxei64_v_u32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint8mf8_t bindex,
+                            vuint64m1_t value, size_t vl) {
+  return vsuxei8_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint8mf4_t bindex,
+                            vuint64m2_t value, size_t vl) {
+  return vsuxei8_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint8mf2_t bindex,
+                            vuint64m4_t value, size_t vl) {
+  return vsuxei8_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint8m1_t bindex,
+                            vuint64m8_t value, size_t vl) {
+  return vsuxei8_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m1_m(vbool64_t mask, uint64_t *base,
+                             vuint16mf4_t bindex, vuint64m1_t value,
+                             size_t vl) {
+  return vsuxei16_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m2_m(vbool32_t mask, uint64_t *base,
+                             vuint16mf2_t bindex, vuint64m2_t value,
+                             size_t vl) {
+  return vsuxei16_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint16m1_t bindex,
+                             vuint64m4_t value, size_t vl) {
+  return vsuxei16_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint16m2_t bindex,
+                             vuint64m8_t value, size_t vl) {
+  return vsuxei16_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u64m1_m(vbool64_t mask, uint64_t *base,
+                             vuint32mf2_t bindex, vuint64m1_t value,
+                             size_t vl) {
+  return vsuxei32_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint32m1_t bindex,
+                             vuint64m2_t value, size_t vl) {
+  return vsuxei32_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint32m2_t bindex,
+                             vuint64m4_t value, size_t vl) {
+  return vsuxei32_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint32m4_t bindex,
+                             vuint64m8_t value, size_t vl) {
+  return vsuxei32_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u64m1_m(vbool64_t mask, uint64_t *base, vuint64m1_t bindex,
+                             vuint64m1_t value, size_t vl) {
+  return vsuxei64_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u64m2_m(vbool32_t mask, uint64_t *base, vuint64m2_t bindex,
+                             vuint64m2_t value, size_t vl) {
+  return vsuxei64_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u64m4_m(vbool16_t mask, uint64_t *base, vuint64m4_t bindex,
+                             vuint64m4_t value, size_t vl) {
+  return vsuxei64_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_u64m8_m(vbool8_t mask, uint64_t *base, vuint64m8_t bindex,
+                             vuint64m8_t value, size_t vl) {
+  return vsuxei64_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i8.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i8.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_f32mf2_m(vbool64_t mask, float *base, vuint8mf8_t bindex,
+                             vfloat32mf2_t value, size_t vl) {
+  return vsuxei8_v_f32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i8.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i8.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m1_m(vbool32_t mask, float *base, vuint8mf4_t bindex, + vfloat32m1_t value, size_t vl) { + return vsuxei8_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m2_m(vbool16_t mask, float *base, vuint8mf2_t bindex, + vfloat32m2_t value, size_t vl) { + return vsuxei8_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m4_m(vbool8_t mask, float *base, vuint8m1_t bindex, + vfloat32m4_t value, size_t vl) { + return vsuxei8_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei8_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i8.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei8_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i8.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei8_v_f32m8_m(vbool4_t mask, float *base, vuint8m2_t bindex, + vfloat32m8_t value, size_t vl) { + return vsuxei8_v_f32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vsuxei16_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i16.i32( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret void +// +// CHECK-RV64-LABEL: @test_vsuxei16_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i16.i64( [[VALUE:%.*]], * [[TMP0]], [[BINDEX:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret void +// +void test_vsuxei16_v_f32mf2_m(vbool64_t mask, float *base, vuint16mf4_t bindex, + vfloat32mf2_t value, size_t vl) { + return vsuxei16_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// 
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i16.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i16.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f32m1_m(vbool32_t mask, float *base, vuint16mf2_t bindex,
+                             vfloat32m1_t value, size_t vl) {
+  return vsuxei16_v_f32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i16.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i16.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f32m2_m(vbool16_t mask, float *base, vuint16m1_t bindex,
+                             vfloat32m2_t value, size_t vl) {
+  return vsuxei16_v_f32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i16.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i16.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f32m4_m(vbool8_t mask, float *base, vuint16m2_t bindex,
+                             vfloat32m4_t value, size_t vl) {
+  return vsuxei16_v_f32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i16.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i16.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f32m8_m(vbool4_t mask, float *base, vuint16m4_t bindex,
+                             vfloat32m8_t value, size_t vl) {
+  return vsuxei16_v_f32m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i32.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i32.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f32mf2_m(vbool64_t mask, float *base, vuint32mf2_t bindex,
+                              vfloat32mf2_t value, size_t vl) {
+  return vsuxei32_v_f32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i32.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i32.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f32m1_m(vbool32_t mask, float *base, vuint32m1_t bindex,
+                             vfloat32m1_t value, size_t vl) {
+  return vsuxei32_v_f32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i32.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i32.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f32m2_m(vbool16_t mask, float *base, vuint32m2_t bindex,
+                             vfloat32m2_t value, size_t vl) {
+  return vsuxei32_v_f32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i32.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i32.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f32m4_m(vbool8_t mask, float *base, vuint32m4_t bindex,
+                             vfloat32m4_t value, size_t vl) {
+  return vsuxei32_v_f32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i32.i32(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv16f32.nxv16i32.i64(<vscale x 16 x float> [[VALUE:%.*]], <vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f32m8_m(vbool4_t mask, float *base, vuint32m8_t bindex,
+                             vfloat32m8_t value, size_t vl) {
+  return vsuxei32_v_f32m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i64.i32(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f32.nxv1i64.i64(<vscale x 1 x float> [[VALUE:%.*]], <vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f32mf2_m(vbool64_t mask, float *base, vuint64m1_t bindex,
+                              vfloat32mf2_t value, size_t vl) {
+  return vsuxei64_v_f32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i64.i32(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f32.nxv2i64.i64(<vscale x 2 x float> [[VALUE:%.*]], <vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f32m1_m(vbool32_t mask, float *base, vuint64m2_t bindex,
+                             vfloat32m1_t value, size_t vl) {
+  return vsuxei64_v_f32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i64.i32(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f32.nxv4i64.i64(<vscale x 4 x float> [[VALUE:%.*]], <vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f32m2_m(vbool16_t mask, float *base, vuint64m4_t bindex,
+                             vfloat32m2_t value, size_t vl) {
+  return vsuxei64_v_f32m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i64.i32(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f32.nxv8i64.i64(<vscale x 8 x float> [[VALUE:%.*]], <vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f32m4_m(vbool8_t mask, float *base, vuint64m8_t bindex,
+                             vfloat32m4_t value, size_t vl) {
+  return vsuxei64_v_f32m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i8.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i8.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_f64m1_m(vbool64_t mask, double *base, vuint8mf8_t bindex,
+                            vfloat64m1_t value, size_t vl) {
+  return vsuxei8_v_f64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i8.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i8.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_f64m2_m(vbool32_t mask, double *base, vuint8mf4_t bindex,
+                            vfloat64m2_t value, size_t vl) {
+  return vsuxei8_v_f64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i8.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i8.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_f64m4_m(vbool16_t mask, double *base, vuint8mf2_t bindex,
+                            vfloat64m4_t value, size_t vl) {
+  return vsuxei8_v_f64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei8_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i8.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei8_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i8.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei8_v_f64m8_m(vbool8_t mask, double *base, vuint8m1_t bindex,
+                            vfloat64m8_t value, size_t vl) {
+  return vsuxei8_v_f64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i16.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i16.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f64m1_m(vbool64_t mask, double *base, vuint16mf4_t bindex,
+                             vfloat64m1_t value, size_t vl) {
+  return vsuxei16_v_f64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i16.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i16.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f64m2_m(vbool32_t mask, double *base, vuint16mf2_t bindex,
+                             vfloat64m2_t value, size_t vl) {
+  return vsuxei16_v_f64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i16.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i16.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f64m4_m(vbool16_t mask, double *base, vuint16m1_t bindex,
+                             vfloat64m4_t value, size_t vl) {
+  return vsuxei16_v_f64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei16_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i16.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei16_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i16.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei16_v_f64m8_m(vbool8_t mask, double *base, vuint16m2_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsuxei16_v_f64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i32.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i32.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m1_m(vbool64_t mask, double *base, vuint32mf2_t bindex,
+                             vfloat64m1_t value, size_t vl) {
+  return vsuxei32_v_f64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i32.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i32.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m2_m(vbool32_t mask, double *base, vuint32m1_t bindex,
+                             vfloat64m2_t value, size_t vl) {
+  return vsuxei32_v_f64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i32.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i32.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m4_m(vbool16_t mask, double *base, vuint32m2_t bindex,
+                             vfloat64m4_t value, size_t vl) {
+  return vsuxei32_v_f64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei32_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i32.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei32_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i32.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei32_v_f64m8_m(vbool8_t mask, double *base, vuint32m4_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsuxei32_v_f64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i64.i32(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m1_m(vbool64_t mask, double *base, vuint64m1_t bindex,
+                             vfloat64m1_t value, size_t vl) {
+  return vsuxei64_v_f64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i64.i32(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m2_m(vbool32_t mask, double *base, vuint64m2_t bindex,
+                             vfloat64m2_t value, size_t vl) {
+  return vsuxei64_v_f64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i64.i32(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv4f64.nxv4i64.i64(<vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m4_m(vbool16_t mask, double *base, vuint64m4_t bindex,
+                             vfloat64m4_t value, size_t vl) {
+  return vsuxei64_v_f64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vsuxei64_v_f64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i64.i32(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret void
+//
+// CHECK-RV64-LABEL: @test_vsuxei64_v_f64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    call void @llvm.riscv.vsuxei.mask.nxv8f64.nxv8i64.i64(<vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret void
+//
+void test_vsuxei64_v_f64m8_m(vbool8_t mask, double *base, vuint64m8_t bindex,
+                             vfloat64m8_t value, size_t vl) {
+  return vsuxei64_v_f64m8_m(mask, base, bindex, value, vl);
+}