diff --git a/clang/include/clang/Basic/riscv_vector.td b/clang/include/clang/Basic/riscv_vector.td
--- a/clang/include/clang/Basic/riscv_vector.td
+++ b/clang/include/clang/Basic/riscv_vector.td
@@ -195,6 +195,9 @@
   // an automatic definition in header is emitted.
   string HeaderCode = "";
 
+  // Sub extension of the vector spec. Currently only Zvamo and Zvlsseg are supported.
+  string RequiredExtension = "";
+
 }
 
 //===----------------------------------------------------------------------===//
@@ -739,6 +742,34 @@
   }
 }
 
+multiclass RVVAMOBuiltinSet<bit has_signed = false, bit has_unsigned = false,
+                            bit has_fp = false> {
+  defvar type_list = !if(has_fp, ["i","l","f","d"], ["i","l"]);
+  foreach type = type_list in
+    foreach eew_list = EEWList in {
+      defvar eew = eew_list[0];
+      defvar eew_index = eew_list[1];
+      let Name = NAME # "ei" # eew # "_" # "v",
+          IRName = NAME,
+          IRNameMask = NAME # "_mask",
+          HasMaskedOffOperand = false,
+          ManualCodegen = [{
+            // base, bindex, value, vl
+            IntrinsicTypes = {ResultType, Ops[1]->getType(), Ops[3]->getType()};
+            Ops[0] = Builder.CreateBitCast(Ops[0], ResultType->getPointerTo());
+          }],
+          ManualCodegenMask = [{
+            // base, bindex, value, mask, vl
+            IntrinsicTypes = {ResultType, Ops[1]->getType(), Ops[4]->getType()};
+            Ops[0] = Builder.CreateBitCast(Ops[0], ResultType->getPointerTo());
+          }] in {
+        if has_signed then
+          def : RVVBuiltin<"v", "vPe" # eew_index # "Uvv", type>;
+        if !and(!not(IsFloat<type>.val), has_unsigned) then
+          def : RVVBuiltin<"Uv", "UvPUe" # eew_index # "UvUv", type>;
+      }
+    }
+}
+
 // 6. Configuration-Setting Instructions
 // 6.1. vsetvli/vsetvl instructions
 let HasVL = false,
@@ -849,6 +880,19 @@
 defm vle32ff: RVVVLEFFBuiltin<["i", "f"]>;
 defm vle64ff: RVVVLEFFBuiltin<["l", "d"]>;
 
+// 8. Vector AMO Operations
+let RequiredExtension = "Zvamo" in {
+defm vamoswap : RVVAMOBuiltinSet</* hasSigned */ true, /* hasUnsigned */ true, /* hasFP */ true>;
+defm vamoadd : RVVAMOBuiltinSet</* hasSigned */ true, /* hasUnsigned */ true>;
+defm vamoxor : RVVAMOBuiltinSet</* hasSigned */ true, /* hasUnsigned */ true>;
+defm vamoand : RVVAMOBuiltinSet</* hasSigned */ true, /* hasUnsigned */ true>;
+defm vamoor : RVVAMOBuiltinSet</* hasSigned */ true, /* hasUnsigned */ true>;
+defm vamomin : RVVAMOBuiltinSet</* hasSigned */ true>;
+defm vamomax : RVVAMOBuiltinSet</* hasSigned */ true>;
+defm vamominu : RVVAMOBuiltinSet</* hasSigned */ false, /* hasUnsigned */ true>;
+defm vamomaxu : RVVAMOBuiltinSet</* hasSigned */ false, /* hasUnsigned */ true>;
+}
+
 // 12. Vector Integer Arithmetic Instructions
 // 12.1. Vector Single-Width Integer Add and Subtract
 defm vadd : RVVIntBinBuiltinSet;
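For orientation, here is a minimal C-level sketch of what one instantiation of RVVAMOBuiltinSet above expands to. The wrapper names are hypothetical; the vamoaddei8_v_i32m1 intrinsic and its _m masked overload match the signatures exercised by the test below. Because HasMaskedOffOperand is false, the masked form only prepends a mask operand, which is why ManualCodegenMask reads vl from Ops[4] rather than Ops[3].

#include <riscv_vector.h>

// Hypothetical wrapper: for each i < vl, atomically add value[i] to
// base[bindex[i]] and return the values previously held in memory
// (vamoaddei8.v semantics).
vint32m1_t amo_scatter_add(int32_t *base, vuint8mf4_t bindex,
                           vint32m1_t value, size_t vl) {
  return vamoaddei8_v_i32m1(base, bindex, value, vl);
}

// Masked variant: lanes whose mask bit is clear leave memory untouched.
vint32m1_t amo_scatter_add_m(vbool32_t mask, int32_t *base,
                             vuint8mf4_t bindex, vint32m1_t value,
                             size_t vl) {
  return vamoaddei8_v_i32m1_m(mask, base, bindex, value, vl);
}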
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoadd.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoadd.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoadd.c
@@ -0,0 +1,2250 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoaddei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoaddei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoaddei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamoaddei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoaddei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) {
+  return vamoaddei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vint32m4_t test_vamoaddei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) {
+  return vamoaddei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vint32m8_t test_vamoaddei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) {
+  return vamoaddei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoaddei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoaddei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoaddei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) {
+  return vamoaddei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i32(<vscale x 4 x i32>*
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, 
vint32mf2_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoadd.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * 
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t 
value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoadd.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei16_v_u32m1 (uint32_t *base, 
vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32mf2( +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// 
+vuint64m1_t test_vamoaddei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast 
i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m2( 
+// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + 
return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return 
vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return 
vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return 
vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return 
vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei8(mask, base, 
bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, 
vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + 
+// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); 
+} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, 
vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei64(mask, base, bindex, 
value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei8(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei16(mask, base, bindex, value, vl); +} + 
+// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei32(mask, base, bindex, value, vl); +} 
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoaddei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoaddei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoaddei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoaddei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoaddei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoaddei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoaddei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoaddei64(mask, base, bindex, value, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoand.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoand.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoand.c
@@ -0,0 +1,2250 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamoandei8_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoand.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei8_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoand.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoandei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoandei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoand.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoand.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoandei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamoandei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoand.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoand.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoandei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) {
+  return vamoandei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL:
@test_vamoandei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i64(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoandei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) 
[[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i16.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoandei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] 
+// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i8.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoandei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] 
= call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + 
return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i16.i32(* 
[[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoandei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoandei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoandei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoandei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoandei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoandei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoandei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoandei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoandei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoandei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoandei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoandei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoandei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoandei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoandei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoandei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoandei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoandei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoandei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoandei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoandei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoandei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei8_v_i32mf2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei8_v_i32mf2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoandei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoandei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoandei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamoandei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m2_m(
+//
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m1_m( +// CHECK-RV32-NEXT: 
entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: 
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// 
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// 
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] 
= bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to 
* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * 
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast 
i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast 
i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei64(mask, base, bindex, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamomax.c 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamomax.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamomax.c
@@ -0,0 +1,2250 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamomax.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamomax.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamomaxei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamomaxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamomax.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamomax.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamomaxei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamomaxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamomax.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamomax.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamomaxei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) {
+  return vamomaxei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT:
[[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamomaxei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) 
[[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m1( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] 
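// A minimal usage sketch for the vamomaxei16 form exercised in these tests.
// The helper name atomic_max_update is hypothetical, the include of
// <riscv_vector.h> above is assumed, and bindex is taken to supply per-element
// byte offsets from base per the draft V spec (an assumption, not stated in
// this patch): each active element performs one atomic signed-max
// read-modify-write, and the intrinsic's result is the vector of values
// previously held in memory.
static inline vint64m2_t atomic_max_update(int64_t *base, vuint16mf2_t bindex,
                                           vint64m2_t value, size_t vl) {
  // Memory afterwards holds max(old, value[i]); the old values are returned.
  return vamomaxei16(base, bindex, value, vl);
}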
+// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m4( +// CHECK-RV32-NEXT: entry: 
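// The ei8/ei16/ei32/ei64 suffix selects the index element width while the data
// type fixes the register group, so e64 data at LMUL=4 pairs with 32-bit
// indices at LMUL=2, as the signatures in these tests show. A strip-mined
// sketch, assuming the draft vsetvl/vle intrinsic names from the same header
// and byte-offset index semantics (both assumptions, not taken from this
// patch):
void amo_max_table(int64_t *base, const uint32_t *offsets, const int64_t *vals,
                   size_t n) {
  for (size_t done = 0; done < n;) {
    size_t vl = vsetvl_e64m4(n - done);                     // elements this pass
    vuint32m2_t bindex = vle32_v_u32m2(offsets + done, vl); // per-element offsets
    vint64m4_t v = vle64_v_i64m4(vals + done, vl);
    vamomaxei32(base, bindex, v, vl);                       // old values discarded
    done += vl;
  }
}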
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], 
i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i32.i32(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t 
test_vamomaxuei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// 
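// vamomaxu* is the unsigned counterpart of vamomax*: the comparison treats the
// elements as unsigned, so UINT64_MAX wins over 1 where the signed maximum
// would keep 1. A one-line wrapper sketch (hypothetical name amo_umax,
// assuming the same old-value return semantics as above):
static inline vuint64m2_t amo_umax(uint64_t *table, vuint32m1_t bindex,
                                   vuint64m2_t value, size_t vl) {
  return vamomaxuei32(table, bindex, value, vl); // unsigned max; returns old values
}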
+// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i64.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei8_v_i32m1_m (vbool32_t 
mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei16_v_i32mf2_m (vbool64_t mask, int32_t *base, 
vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei16_v_i32m8_m (vbool4_t mask, int32_t *base, 
vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei32_v_i32m4_m (vbool8_t mask, int32_t *base, 
vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei64_v_i32m2_m (vbool16_t mask, int32_t *base, 
vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, 
vint64m4_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, 
size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) 
{ + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return 
vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + 
return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t 
value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, 
vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei32_v_u32m8_m 
(vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// 
+vuint32m4_t test_vamomaxuei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] 
+// +vuint64m8_t test_vamomaxuei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamomaxuei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) {
+  return vamomaxuei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamomaxuei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) {
+  return vamomaxuei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamomaxuei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) {
+  return vamomaxuei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamomaxuei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamomaxuei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamomaxuei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) {
+  return vamomaxuei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamomaxuei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamomaxuei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamomaxuei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamomaxuei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamomaxuei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamomaxuei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamomaxuei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamomaxuei64(mask, base, bindex, value, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamomin.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamomin.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamomin.c
@@ -0,0 +1,2250 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamominei8_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamomin.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominei8_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamomin.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamominei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamominei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamominei8_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamomin.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominei8_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamomin.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamominei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamominei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamominei8_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamomin.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominei8_v_i32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamomin.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8>
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamominei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamominei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamominei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamominei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) 
[[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomin.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamominei8(base, 
bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamominei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamominei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamominei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], 
i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamominei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomin.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamominei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return 
vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamominei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamominu.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] 
+// +vuint32mf2_t test_vamominuei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32mf2( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) 
[[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamominu.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t 
value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: 
[[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] 
+// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamominei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamominei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamominei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamominei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominuei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamominuei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], 
[[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i8.i32(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamominu.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast 
i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamominu.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamominu.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamominuei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamominuei64(mask, base, bindex, value, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoor.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoor.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoor.c
@@ -0,0 +1,2250 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamoorei8_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoor.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei8_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoor.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoorei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoorei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoor.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoor.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoorei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamoorei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+//
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m1( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t 
test_vamoorei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) 
{ + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i32(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoor.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + 
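To make the pattern these autogenerated checks exercise concrete, here is a minimal usage sketch of the overloaded Zvamo intrinsics from plain C. It is not part of the patch: the function names scatter_or and scatter_or_masked are illustrative only, the signatures mirror test_vamoorei8_v_u32m1 and test_vamoorei8_v_i32m1_m above, and the behavioral comments summarize the draft Zvamo semantics rather than anything these type-level tests assert.

// Minimal sketch, not part of the generated tests. Assumes a toolchain
// with <riscv_vector.h> and the experimental V and Zvamo features
// enabled, as in the RUN lines of these test files.
#include <riscv_vector.h>
#include <stddef.h>
#include <stdint.h>

// Atomically ORs value[i] into the 32-bit word at base plus the offset
// given by bindex[i], for each element i < vl, and returns a vector of
// the values previously in memory (per the draft Zvamo semantics).
// The overload resolves on operand types, so this same vamoorei8 name
// covers the i32/u32/i64/u64 variants tested above.
vuint32m1_t scatter_or(uint32_t *base, vuint8mf4_t bindex,
                       vuint32m1_t value, size_t vl) {
  return vamoorei8(base, bindex, value, vl);
}

// Masked form: the mask comes first, and no atomic operation is
// performed for inactive elements (cf. test_vamoorei8_v_i32m1_m).
vint32m1_t scatter_or_masked(vbool32_t mask, int32_t *base,
                             vuint8mf4_t bindex, vint32m1_t value,
                             size_t vl) {
  return vamoorei8(mask, base, bindex, value, vl);
}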
+// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoorei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 
[[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m2( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoorei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i64(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], 
i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] 
+// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t 
test_vamoorei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei16_v_i64m4_m (vbool16_t mask, 
int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, 
vint64m4_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + 
return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei8(mask, base, bindex, 
value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoorei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoorei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoorei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoorei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m1_m( +// CHECK-RV32-NEXT: 
entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoorei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoorei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoorei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoorei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoorei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoorei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoorei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoorei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoorei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoorei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoorei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoorei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoorei64(mask, base, bindex, value, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoswap.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoswap.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoswap.c
@@ -0,0 +1,3687 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -target-feature +experimental-zfh -target-feature +experimental-zvamo -disable-O0-optnone -fallow-half-arguments-and-returns -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -target-feature +experimental-zfh -target-feature +experimental-zvamo -disable-O0-optnone -fallow-half-arguments-and-returns -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \
+// RUN:   -target-feature +experimental-zfh -target-feature +experimental-zvamo -fallow-half-arguments-and-returns -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5:#.*]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoswapei8_v_i32mf2(int32_t *base, vuint8mf8_t bindex,
+                                      vint32mf2_t value, size_t vl) {
+  return vamoswapei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoswapei8_v_i32m1(int32_t *base, vuint8mf4_t bindex,
+                                    vint32m1_t value, size_t vl) {
+  return vamoswapei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoswap.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call
@llvm.riscv.vamoswap.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei8_v_i32m2(int32_t *base, vuint8mf2_t bindex, + vint32m2_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei8_v_i32m4(int32_t *base, vuint8m1_t bindex, + vint32m4_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei8_v_i32m8(int32_t *base, vuint8m2_t bindex, + vint32m8_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei16_v_i32mf2(int32_t *base, vuint16mf4_t bindex, + vint32mf2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei16_v_i32m1(int32_t *base, vuint16mf2_t bindex, + vint32m1_t value, size_t vl) 
{ + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei16_v_i32m2(int32_t *base, vuint16m1_t bindex, + vint32m2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei16_v_i32m4(int32_t *base, vuint16m2_t bindex, + vint32m4_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei16_v_i32m8(int32_t *base, vuint16m4_t bindex, + vint32m8_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei32_v_i32mf2(int32_t *base, vuint32mf2_t bindex, + vint32mf2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = 
call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei32_v_i32m1(int32_t *base, vuint32m1_t bindex, + vint32m1_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei32_v_i32m2(int32_t *base, vuint32m2_t bindex, + vint32m2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei32_v_i32m4(int32_t *base, vuint32m4_t bindex, + vint32m4_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei32_v_i32m8(int32_t *base, vuint32m8_t bindex, + vint32m8_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei64_v_i32mf2(int32_t *base, vuint64m1_t bindex, + vint32mf2_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei64_v_i32m1(int32_t *base, vuint64m2_t bindex, + vint32m1_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei64_v_i32m2(int32_t *base, vuint64m4_t bindex, + vint32m2_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei64_v_i32m4(int32_t *base, vuint64m8_t bindex, + vint32m4_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] 
+// +vint64m1_t test_vamoswapei8_v_i64m1(int64_t *base, vuint8mf8_t bindex, + vint64m1_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei8_v_i64m2(int64_t *base, vuint8mf4_t bindex, + vint64m2_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei8_v_i64m4(int64_t *base, vuint8mf2_t bindex, + vint64m4_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei8_v_i64m8(int64_t *base, vuint8m1_t bindex, + vint64m8_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei16_v_i64m1(int64_t *base, vuint16mf4_t bindex, + vint64m1_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei16_v_i64m2(int64_t *base, vuint16mf2_t bindex, + vint64m2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei16_v_i64m4(int64_t *base, vuint16m1_t bindex, + vint64m4_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei16_v_i64m8(int64_t *base, vuint16m2_t bindex, + vint64m8_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei32_v_i64m1(int64_t *base, vuint32mf2_t bindex, + vint64m1_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei32_v_i64m2(int64_t *base, vuint32m1_t bindex, + vint64m2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei32_v_i64m4(int64_t *base, vuint32m2_t bindex, + vint64m4_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei32_v_i64m8(int64_t *base, vuint32m4_t bindex, + vint64m8_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei64_v_i64m1(int64_t *base, vuint64m1_t bindex, + vint64m1_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei64_v_i64m2(int64_t *base, vuint64m2_t bindex, + vint64m2_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei64_v_i64m4(int64_t *base, vuint64m4_t bindex, + vint64m4_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei64_v_i64m8(int64_t *base, vuint64m8_t bindex, + vint64m8_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei8_v_u32mf2(uint32_t *base, vuint8mf8_t bindex, + vuint32mf2_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei8_v_u32m1(uint32_t *base, vuint8mf4_t bindex, + vuint32m1_t value, size_t vl) { + return vamoswapei8(base, bindex, value, 
vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei8_v_u32m2(uint32_t *base, vuint8mf2_t bindex, + vuint32m2_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei8_v_u32m4(uint32_t *base, vuint8m1_t bindex, + vuint32m4_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei8_v_u32m8(uint32_t *base, vuint8m2_t bindex, + vuint32m8_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei16_v_u32mf2(uint32_t *base, vuint16mf4_t bindex, + vuint32mf2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i32(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei16_v_u32m1(uint32_t *base, vuint16mf2_t bindex, + vuint32m1_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei16_v_u32m2(uint32_t *base, vuint16m1_t bindex, + vuint32m2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei16_v_u32m4(uint32_t *base, vuint16m2_t bindex, + vuint32m4_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei16_v_u32m8(uint32_t *base, vuint16m4_t bindex, + vuint32m8_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast 
i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei32_v_u32mf2(uint32_t *base, vuint32mf2_t bindex, + vuint32mf2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei32_v_u32m1(uint32_t *base, vuint32m1_t bindex, + vuint32m1_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei32_v_u32m2(uint32_t *base, vuint32m2_t bindex, + vuint32m2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei32_v_u32m4(uint32_t *base, vuint32m4_t bindex, + vuint32m4_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t 
test_vamoswapei32_v_u32m8(uint32_t *base, vuint32m8_t bindex, + vuint32m8_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei64_v_u32mf2(uint32_t *base, vuint64m1_t bindex, + vuint32mf2_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei64_v_u32m1(uint32_t *base, vuint64m2_t bindex, + vuint32m1_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei64_v_u32m2(uint32_t *base, vuint64m4_t bindex, + vuint32m2_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei64_v_u32m4(uint32_t *base, vuint64m8_t bindex, + vuint32m4_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// 
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei8_v_u64m1(uint64_t *base, vuint8mf8_t bindex, + vuint64m1_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei8_v_u64m2(uint64_t *base, vuint8mf4_t bindex, + vuint64m2_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei8_v_u64m4(uint64_t *base, vuint8mf2_t bindex, + vuint64m4_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei8_v_u64m8(uint64_t *base, vuint8m1_t bindex, + vuint64m8_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoswapei16_v_u64m1(uint64_t *base, vuint16mf4_t bindex,
+ vuint64m1_t value, size_t vl) {
+ return vamoswapei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoswapei16_v_u64m2(uint64_t *base, vuint16mf2_t bindex,
+ vuint64m2_t value, size_t vl) {
+ return vamoswapei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoswapei16_v_u64m4(uint64_t *base, vuint16m1_t bindex,
+ vuint64m4_t value, size_t vl) {
+ return vamoswapei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoswapei16_v_u64m8(uint64_t *base, vuint16m2_t bindex,
+ vuint64m8_t value, size_t vl) {
+ return vamoswapei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoswapei32_v_u64m1(uint64_t *base, vuint32mf2_t bindex,
+ vuint64m1_t value, size_t vl) {
+ return vamoswapei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoswapei32_v_u64m2(uint64_t *base, vuint32m1_t bindex,
+ vuint64m2_t value, size_t vl) {
+ return vamoswapei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoswapei32_v_u64m4(uint64_t *base, vuint32m2_t bindex,
+ vuint64m4_t value, size_t vl) {
+ return vamoswapei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoswapei32_v_u64m8(uint64_t *base, vuint32m4_t bindex,
+ vuint64m8_t value, size_t vl) {
+ return vamoswapei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoswapei64_v_u64m1(uint64_t *base, vuint64m1_t bindex,
+ vuint64m1_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoswapei64_v_u64m2(uint64_t *base, vuint64m2_t bindex,
+ vuint64m2_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoswapei64_v_u64m4(uint64_t *base, vuint64m4_t bindex,
+ vuint64m4_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoswapei64_v_u64m8(uint64_t *base, vuint64m8_t bindex,
+ vuint64m8_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i8.i32(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i8.i64(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x float> [[TMP1]]
+//
+vfloat32mf2_t test_vamoswapei8_v_f32mf2(float *base, vuint8mf8_t bindex,
+ vfloat32mf2_t value, size_t vl) {
+ return vamoswapei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x float>
@llvm.riscv.vamoswap.nxv2f32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei8_v_f32m1(float *base, vuint8mf4_t bindex, + vfloat32m1_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei8_v_f32m2(float *base, vuint8mf2_t bindex, + vfloat32m2_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei8_v_f32m4(float *base, vuint8m1_t bindex, + vfloat32m4_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16f32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16f32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei8_v_f32m8(float *base, vuint8m2_t bindex, + vfloat32m8_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei16_v_f32mf2(float *base, vuint16mf4_t bindex, + vfloat32mf2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei16_v_f32m1(float *base, vuint16mf2_t bindex, + vfloat32m1_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei16_v_f32m2(float *base, vuint16m1_t bindex, + vfloat32m2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei16_v_f32m4(float *base, vuint16m2_t bindex, + vfloat32m4_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16f32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16f32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) 
[[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei16_v_f32m8(float *base, vuint16m4_t bindex, + vfloat32m8_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei32_v_f32mf2(float *base, vuint32mf2_t bindex, + vfloat32mf2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei32_v_f32m1(float *base, vuint32m1_t bindex, + vfloat32m1_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei32_v_f32m2(float *base, vuint32m2_t bindex, + vfloat32m2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei32_v_f32m4(float *base, vuint32m4_t bindex, + vfloat32m4_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16f32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16f32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei32_v_f32m8(float *base, vuint32m8_t bindex, + vfloat32m8_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei64_v_f32mf2(float *base, vuint64m1_t bindex, + vfloat32mf2_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei64_v_f32m1(float *base, vuint64m2_t bindex, + vfloat32m1_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei64_v_f32m2(float *base, vuint64m4_t bindex, + vfloat32m2_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.nxv8f32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei64_v_f32m4(float *base, vuint64m8_t bindex, + vfloat32m4_t value, size_t vl) { + return vamoswapei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei8_v_f64m1(double *base, vuint8mf8_t bindex, + vfloat64m1_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei8_v_f64m2(double *base, vuint8mf4_t bindex, + vfloat64m2_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei8_v_f64m4(double *base, vuint8mf2_t bindex, + vfloat64m4_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m8( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei8_v_f64m8(double *base, vuint8m1_t bindex, + vfloat64m8_t value, size_t vl) { + return vamoswapei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei16_v_f64m1(double *base, vuint16mf4_t bindex, + vfloat64m1_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei16_v_f64m2(double *base, vuint16mf2_t bindex, + vfloat64m2_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei16_v_f64m4(double *base, vuint16m1_t bindex, + vfloat64m4_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) 
[[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei16_v_f64m8(double *base, vuint16m2_t bindex, + vfloat64m8_t value, size_t vl) { + return vamoswapei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1f64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei32_v_f64m1(double *base, vuint32mf2_t bindex, + vfloat64m1_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2f64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei32_v_f64m2(double *base, vuint32m1_t bindex, + vfloat64m2_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4f64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei32_v_f64m4(double *base, vuint32m2_t bindex, + vfloat64m4_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8f64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei32_v_f64m8(double *base, vuint32m4_t bindex, + vfloat64m8_t value, size_t vl) { + return vamoswapei32(base, bindex, value, vl); +} + 
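+// The masked "_m" variants tested later in this file pass a vbool mask as
+// the first argument and are expected to lower to the ".mask" flavor of the
+// same intrinsic. A minimal sketch of the two overloaded call shapes, with
+// operands as in the surrounding tests (v is an illustrative result only):
+//   v = vamoswapei64(base, bindex, value, vl);       // unmasked
+//   v = vamoswapei64(mask, base, bindex, value, vl); // masked
+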
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i64.i32(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 1 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i64.i64(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP1]]
+//
+vfloat64m1_t test_vamoswapei64_v_f64m1(double *base, vuint64m1_t bindex,
+ vfloat64m1_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i64.i32(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 2 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i64.i64(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP1]]
+//
+vfloat64m2_t test_vamoswapei64_v_f64m2(double *base, vuint64m2_t bindex,
+ vfloat64m2_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i64.i32(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i64.i64(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP1]]
+//
+vfloat64m4_t test_vamoswapei64_v_f64m4(double *base, vuint64m4_t bindex,
+ vfloat64m4_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i64.i32(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i64.i64(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP1]]
+//
+vfloat64m8_t test_vamoswapei64_v_f64m8(double *base, vuint64m8_t bindex,
+ vfloat64m8_t value, size_t vl) {
+ return vamoswapei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32mf2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32>
@llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei8_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint8mf8_t bindex, vint32mf2_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei8_v_i32m1_m(vbool32_t mask, int32_t *base, + vuint8mf4_t bindex, vint32m1_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei8_v_i32m2_m(vbool16_t mask, int32_t *base, + vuint8mf2_t bindex, vint32m2_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei8_v_i32m4_m(vbool8_t mask, int32_t *base, + vuint8m1_t bindex, vint32m4_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = 
call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei8_v_i32m8_m(vbool4_t mask, int32_t *base, + vuint8m2_t bindex, vint32m8_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei16_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint16mf4_t bindex, vint32mf2_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei16_v_i32m1_m(vbool32_t mask, int32_t *base, + vuint16mf2_t bindex, vint32m1_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei16_v_i32m2_m(vbool16_t mask, int32_t *base, + vuint16m1_t bindex, vint32m2_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei16_v_i32m4_m(vbool8_t mask, int32_t *base, + vuint16m2_t bindex, vint32m4_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei16_v_i32m8_m(vbool4_t mask, int32_t *base, + vuint16m4_t bindex, vint32m8_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei32_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint32mf2_t bindex, vint32mf2_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei32_v_i32m1_m(vbool32_t mask, int32_t *base, + vuint32m1_t bindex, vint32m1_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei32_v_i32m2_m(vbool16_t mask, int32_t *base, + vuint32m2_t bindex, vint32m2_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei32_v_i32m4_m(vbool8_t mask, int32_t *base, + vuint32m4_t bindex, vint32m4_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei32_v_i32m8_m(vbool4_t mask, int32_t *base, + vuint32m8_t bindex, vint32m8_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei64_v_i32mf2_m(vbool64_t mask, int32_t *base, + vuint64m1_t bindex, vint32mf2_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// 
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei64_v_i32m1_m(vbool32_t mask, int32_t *base, + vuint64m2_t bindex, vint32m1_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei64_v_i32m2_m(vbool16_t mask, int32_t *base, + vuint64m4_t bindex, vint32m2_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei64_v_i32m4_m(vbool8_t mask, int32_t *base, + vuint64m8_t bindex, vint32m4_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei8_v_i64m1_m(vbool64_t mask, int64_t *base, + vuint8mf8_t bindex, vint64m1_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m2_m( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei8_v_i64m2_m(vbool32_t mask, int64_t *base, + vuint8mf4_t bindex, vint64m2_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei8_v_i64m4_m(vbool16_t mask, int64_t *base, + vuint8mf2_t bindex, vint64m4_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei8_v_i64m8_m(vbool8_t mask, int64_t *base, + vuint8m1_t bindex, vint64m8_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei16_v_i64m1_m(vbool64_t mask, int64_t *base, + vuint16mf4_t bindex, vint64m1_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoswapei16_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoswapei16_v_i64m2_m(vbool32_t mask, int64_t *base,
+                                       vuint16mf2_t bindex, vint64m2_t value,
+                                       size_t vl) {
+  return vamoswapei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoswapei16_v_i64m4_m(vbool16_t mask, int64_t *base,
+                                       vuint16m1_t bindex, vint64m4_t value,
+                                       size_t vl) {
+  return vamoswapei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoswapei16_v_i64m8_m(vbool8_t mask, int64_t *base,
+                                       vuint16m2_t bindex, vint64m8_t value,
+                                       size_t vl) {
+  return vamoswapei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vamoswapei32_v_i64m1_m(vbool64_t mask, int64_t *base,
+                                       vuint32mf2_t bindex, vint64m1_t value,
+                                       size_t vl) {
+  return vamoswapei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoswapei32_v_i64m2_m(vbool32_t mask, int64_t *base,
+                                       vuint32m1_t bindex, vint64m2_t value,
+                                       size_t vl) {
+  return vamoswapei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoswapei32_v_i64m4_m(vbool16_t mask, int64_t *base,
+                                       vuint32m2_t bindex, vint64m4_t value,
+                                       size_t vl) {
+  return vamoswapei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoswapei32_v_i64m8_m(vbool8_t mask, int64_t *base,
+                                       vuint32m4_t bindex, vint64m8_t value,
+                                       size_t vl) {
+  return vamoswapei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vamoswapei64_v_i64m1_m(vbool64_t mask, int64_t *base,
+                                       vuint64m1_t bindex, vint64m1_t value,
+                                       size_t vl) {
+  return vamoswapei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoswapei64_v_i64m2_m(vbool32_t mask, int64_t *base,
+                                       vuint64m2_t bindex, vint64m2_t value,
+                                       size_t vl) {
+  return vamoswapei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoswapei64_v_i64m4_m(vbool16_t mask, int64_t *base,
+                                       vuint64m4_t bindex, vint64m4_t value,
+                                       size_t vl) {
+  return vamoswapei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoswapei64_v_i64m8_m(vbool8_t mask, int64_t *base,
+                                       vuint64m8_t bindex, vint64m8_t value,
+                                       size_t vl) {
+  return vamoswapei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vuint32mf2_t test_vamoswapei8_v_u32mf2_m(vbool64_t mask, uint32_t *base,
+                                         vuint8mf8_t bindex, vuint32mf2_t value,
+                                         size_t vl) {
+ return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei8_v_u32m1_m(vbool32_t mask, uint32_t *base, + vuint8mf4_t bindex, vuint32m1_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei8_v_u32m2_m(vbool16_t mask, uint32_t *base, + vuint8mf2_t bindex, vuint32m2_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei8_v_u32m4_m(vbool8_t mask, uint32_t *base, + vuint8m1_t bindex, vuint32m4_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei8_v_u32m8_m(vbool4_t mask, uint32_t *base, + vuint8m2_t bindex, vuint32m8_t value, + 
size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei16_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint16mf4_t bindex, + vuint32mf2_t value, size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei16_v_u32m1_m(vbool32_t mask, uint32_t *base, + vuint16mf2_t bindex, vuint32m1_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei16_v_u32m2_m(vbool16_t mask, uint32_t *base, + vuint16m1_t bindex, vuint32m2_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei16_v_u32m4_m(vbool8_t mask, uint32_t *base, 
+ vuint16m2_t bindex, vuint32m4_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei16_v_u32m8_m(vbool4_t mask, uint32_t *base, + vuint16m4_t bindex, vuint32m8_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei32_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint32mf2_t bindex, + vuint32mf2_t value, size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei32_v_u32m1_m(vbool32_t mask, uint32_t *base, + vuint32m1_t bindex, vuint32m1_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t 
test_vamoswapei32_v_u32m2_m(vbool16_t mask, uint32_t *base, + vuint32m2_t bindex, vuint32m2_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei32_v_u32m4_m(vbool8_t mask, uint32_t *base, + vuint32m4_t bindex, vuint32m4_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei32_v_u32m8_m(vbool4_t mask, uint32_t *base, + vuint32m8_t bindex, vuint32m8_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei64_v_u32mf2_m(vbool64_t mask, uint32_t *base, + vuint64m1_t bindex, + vuint32mf2_t value, size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei64_v_u32m1_m(vbool32_t mask, uint32_t *base, + vuint64m2_t bindex, vuint32m1_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei64_v_u32m2_m(vbool16_t mask, uint32_t *base, + vuint64m4_t bindex, vuint32m2_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei64_v_u32m4_m(vbool8_t mask, uint32_t *base, + vuint64m8_t bindex, vuint32m4_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei8_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint8mf8_t bindex, vuint64m1_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 
[[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei8_v_u64m2_m(vbool32_t mask, uint64_t *base, + vuint8mf4_t bindex, vuint64m2_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei8_v_u64m4_m(vbool16_t mask, uint64_t *base, + vuint8mf2_t bindex, vuint64m4_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei8_v_u64m8_m(vbool8_t mask, uint64_t *base, + vuint8m1_t bindex, vuint64m8_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei16_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint16mf4_t bindex, vuint64m1_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei16_v_u64m2_m(vbool32_t mask, uint64_t *base, + vuint16mf2_t bindex, vuint64m2_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei16_v_u64m4_m(vbool16_t mask, uint64_t *base, + vuint16m1_t bindex, vuint64m4_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei16_v_u64m8_m(vbool8_t mask, uint64_t *base, + vuint16m2_t bindex, vuint64m8_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei32_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint32mf2_t bindex, vuint64m1_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei32_v_u64m2_m(vbool32_t mask, uint64_t *base, + vuint32m1_t bindex, vuint64m2_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei32_v_u64m4_m(vbool16_t mask, uint64_t *base, + vuint32m2_t bindex, vuint64m4_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei32_v_u64m8_m(vbool8_t mask, uint64_t *base, + vuint32m4_t bindex, vuint64m8_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei64_v_u64m1_m(vbool64_t mask, uint64_t *base, + vuint64m1_t bindex, vuint64m1_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei64_v_u64m2_m(vbool32_t mask, uint64_t *base, + vuint64m2_t bindex, vuint64m2_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei64_v_u64m4_m(vbool16_t mask, uint64_t *base, + vuint64m4_t bindex, vuint64m4_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei64_v_u64m8_m(vbool8_t mask, uint64_t *base, + vuint64m8_t bindex, vuint64m8_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei8_v_f32mf2_m(vbool64_t mask, float *base, + vuint8mf8_t bindex, + vfloat32mf2_t value, size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] 
= bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei8_v_f32m1_m(vbool32_t mask, float *base, + vuint8mf4_t bindex, vfloat32m1_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei8_v_f32m2_m(vbool16_t mask, float *base, + vuint8mf2_t bindex, vfloat32m2_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei8_v_f32m4_m(vbool8_t mask, float *base, + vuint8m1_t bindex, vfloat32m4_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei8_v_f32m8_m(vbool4_t mask, float *base, + vuint8m2_t bindex, vfloat32m8_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei16_v_f32mf2_m(vbool64_t mask, float *base, + vuint16mf4_t bindex, + vfloat32mf2_t value, size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei16_v_f32m1_m(vbool32_t mask, float *base, + vuint16mf2_t bindex, + vfloat32m1_t value, size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei16_v_f32m2_m(vbool16_t mask, float *base, + vuint16m1_t bindex, vfloat32m2_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei16_v_f32m4_m(vbool8_t mask, float *base, + vuint16m2_t bindex, vfloat32m4_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamoswapei16_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei16_v_f32m8_m(vbool4_t mask, float *base, + vuint16m4_t bindex, vfloat32m8_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei32_v_f32mf2_m(vbool64_t mask, float *base, + vuint32mf2_t bindex, + vfloat32mf2_t value, size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei32_v_f32m1_m(vbool32_t mask, float *base, + vuint32m1_t bindex, vfloat32m1_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei32_v_f32m2_m(vbool16_t mask, float *base, + vuint32m2_t bindex, vfloat32m2_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei32_v_f32m4_m(vbool8_t mask, float *base, + vuint32m4_t bindex, vfloat32m4_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei32_v_f32m8_m(vbool4_t mask, float *base, + vuint32m8_t bindex, vfloat32m8_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei64_v_f32mf2_m(vbool64_t mask, float *base, + vuint64m1_t bindex, + vfloat32mf2_t value, size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei64_v_f32m1_m(vbool32_t mask, float *base, + vuint64m2_t bindex, vfloat32m1_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei64_v_f32m2_m(vbool16_t mask, float *base, + vuint64m4_t bindex, vfloat32m2_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei64_v_f32m4_m(vbool8_t mask, float *base, + vuint64m8_t bindex, vfloat32m4_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei8_v_f64m1_m(vbool64_t mask, double *base, + vuint8mf8_t bindex, vfloat64m1_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei8_v_f64m2_m(vbool32_t mask, double *base, + vuint8mf4_t bindex, vfloat64m2_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv4f64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei8_v_f64m4_m(vbool16_t mask, double *base, + vuint8mf2_t bindex, vfloat64m4_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei8_v_f64m8_m(vbool8_t mask, double *base, + vuint8m1_t bindex, vfloat64m8_t value, + size_t vl) { + return vamoswapei8(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei16_v_f64m1_m(vbool64_t mask, double *base, + vuint16mf4_t bindex, + vfloat64m1_t value, size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei16_v_f64m2_m(vbool32_t mask, double *base, + vuint16mf2_t bindex, + vfloat64m2_t value, size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei16_v_f64m4_m(vbool16_t mask, double *base, + vuint16m1_t bindex, vfloat64m4_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei16_v_f64m8_m(vbool8_t mask, double *base, + vuint16m2_t bindex, vfloat64m8_t value, + size_t vl) { + return vamoswapei16(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei32_v_f64m1_m(vbool64_t mask, double *base, + vuint32mf2_t bindex, + vfloat64m1_t value, size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei32_v_f64m2_m(vbool32_t mask, double *base, + vuint32m1_t bindex, vfloat64m2_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m4_m( +// 
+ +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i32.i32(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret <vscale x 4 x double> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i32.i64(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP1]] +// +vfloat64m4_t test_vamoswapei32_v_f64m4_m(vbool16_t mask, double *base, + vuint32m2_t bindex, vfloat64m4_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} +
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i32.i32(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret <vscale x 8 x double> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i32.i64(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP1]] +// +vfloat64m8_t test_vamoswapei32_v_f64m8_m(vbool8_t mask, double *base, + vuint32m4_t bindex, vfloat64m8_t value, + size_t vl) { + return vamoswapei32(mask, base, bindex, value, vl); +} +
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i64.i32(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret <vscale x 1 x double> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i64.i64(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret <vscale x 1 x double> [[TMP1]] +// +vfloat64m1_t test_vamoswapei64_v_f64m1_m(vbool64_t mask, double *base, + vuint64m1_t bindex, vfloat64m1_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} +
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i64.i32(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret <vscale x 2 x double> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i64.i64(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret <vscale x 2 x double> [[TMP1]] +// +vfloat64m2_t test_vamoswapei64_v_f64m2_m(vbool32_t mask, double *base, + vuint64m2_t bindex, vfloat64m2_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} +
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i64.i32(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret <vscale x 4 x double> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i64.i64(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret <vscale x 4 x double> [[TMP1]] +// +vfloat64m4_t test_vamoswapei64_v_f64m4_m(vbool16_t mask, double *base, + vuint64m4_t bindex, vfloat64m4_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} +
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i64.i32(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret <vscale x 8 x double> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i64.i64(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret <vscale x 8 x double> [[TMP1]] +// +vfloat64m8_t test_vamoswapei64_v_f64m8_m(vbool8_t mask, double *base, + vuint64m8_t bindex, vfloat64m8_t value, + size_t vl) { + return vamoswapei64(mask, base, bindex, value, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoxor.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoxor.c new file mode --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-overloaded/vamoxor.c @@ -0,0 +1,2250 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1 +// RUN: FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include <riscv_vector.h> +
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5:#.*]] +// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5:#.*]] +// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]] +//
+vint32mf2_t test_vamoxorei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] 
to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m8( +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// 
+vint32m4_t test_vamoxorei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m8( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] 
+// +vint64m8_t test_vamoxorei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32mf2( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// 
+vuint32m8_t test_vamoxorei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei16(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei32(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei64(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]] +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoxorei8_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoxorei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoxorei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.nxv2i64.nxv2i16.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei16(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoxorei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoxorei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei32(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoxorei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoxorei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei64(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoxorei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoxorei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoxorei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vint32m4_t test_vamoxorei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vint32m8_t test_vamoxorei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoxorei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoxorei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoxorei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vint32m4_t test_vamoxorei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vint32m8_t test_vamoxorei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoxorei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoxorei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoxorei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vint32m4_t test_vamoxorei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vint32m8_t test_vamoxorei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoxorei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoxorei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoxorei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vint32m4_t test_vamoxorei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vamoxorei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoxorei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoxorei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoxorei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vamoxorei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoxorei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoxorei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoxorei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vamoxorei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoxorei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoxorei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoxorei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vamoxorei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoxorei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoxorei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoxorei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vuint32mf2_t test_vamoxorei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vamoxorei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vuint32m2_t test_vamoxorei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vuint32m4_t test_vamoxorei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vuint32m8_t test_vamoxorei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vuint32mf2_t test_vamoxorei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vamoxorei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vuint32m2_t test_vamoxorei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vuint32m4_t test_vamoxorei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vuint32m8_t test_vamoxorei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vuint32mf2_t test_vamoxorei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vamoxorei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vuint32m2_t test_vamoxorei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vuint32m4_t test_vamoxorei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vuint32m8_t test_vamoxorei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vuint32mf2_t test_vamoxorei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vamoxorei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vuint32m2_t test_vamoxorei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vuint32m4_t test_vamoxorei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoxorei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei8(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoxorei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei16(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoxorei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei32(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoxorei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex,
value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) [[ATTR5]]
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei64(mask, base, bindex, value, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoadd.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoadd.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoadd.c
@@ -0,0 +1,2250 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoaddei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoaddei8_v_i32mf2(base, bindex, value, vl);
+}
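For orientation amid the generated checks: each non-overloaded vamo<op>ei<eew>_v_<type> intrinsic exercised in this file takes a scalar base pointer, a vector of unsigned offsets (bindex), a value vector, and an element count vl; for each of the first vl elements it applies the atomic operation to the addressed memory and, in the value-returning form tested here, yields the data the memory held before the update. A minimal usage sketch, assuming <riscv_vector.h> and a toolchain with the experimental Zvamo feature enabled (the RUN lines above use -target-feature +experimental-zvamo); the wrapper name is hypothetical and not part of this patch:

    #include <riscv_vector.h>

    // Atomically accumulate inc[i] into the 32-bit counter addressed via
    // base and idx[i], for the first vl elements; the returned vector holds
    // the counter values observed before each update.
    vint32m1_t accumulate(int32_t *base, vuint8mf4_t idx,
                          vint32m1_t inc, size_t vl) {
      return vamoaddei8_v_i32m1(base, idx, inc, vl);
    }

The overloaded spellings tested in the previous file (plain vamoaddei8, with the mask as the leading argument in the _m variants) lower to the same IR intrinsics.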
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoaddei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) {
+  return vamoaddei8_v_i32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoaddei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) {
+  return vamoaddei8_v_i32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP1]]
+//
+vint32m4_t test_vamoaddei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) {
+  return vamoaddei8_v_i32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP1]]
+//
+vint32m8_t test_vamoaddei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) {
+  return vamoaddei8_v_i32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i32(<vscale x 1 x i32>* [[TMP0]],
[[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei16_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i64(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m8( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamoaddei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei8_v_i64m8 (int64_t 
*base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoadd.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei32_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoadd.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vint64m1_t test_vamoaddei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) {
+  return vamoaddei64_v_i64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vint64m2_t test_vamoaddei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) {
+  return vamoaddei64_v_i64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vint64m4_t test_vamoaddei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) {
+  return vamoaddei64_v_i64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamoaddei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) {
+  return vamoaddei64_v_i64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32mf2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+vuint32mf2_t test_vamoaddei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) {
+  return vamoaddei8_v_u32mf2(base, bindex, value, vl);
+}
+
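A pattern worth noting across these generated cases: the bindex element width is the EEW in the intrinsic name, and its register group (LMUL) is scaled so the index vector carries the same number of elements as the data vector, which is why u32m1 data pairs with a u8mf4 index, u32m2 with u8mf2, and so on up to u32m8 with u8m2. A sketch under the same assumptions as the example above (hypothetical helper; vamoor is declared by the same RVVAMOBuiltinSet multiclass as vamoadd):

    #include <riscv_vector.h>

    // Atomically OR bits[i] into the word addressed via base and idx[i] and
    // return the previous words; the vuint8mf4_t index matches the element
    // count of the vuint32m1_t data (EEW 8 at LMUL 1/4 vs. EEW 32 at LMUL 1).
    vuint32m1_t set_flags(uint32_t *base, vuint8mf4_t idx,
                          vuint32m1_t bits, size_t vl) {
      return vamoorei8_v_u32m1(base, idx, bits, vl);
    }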
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m1(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m1(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoadd.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vamoaddei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) {
+  return vamoaddei8_v_u32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m2(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoadd.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP1]]
+//
+vuint32m2_t test_vamoaddei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) {
+  return vamoaddei8_v_u32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m4(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m4(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoadd.nxv8i32.nxv8i8.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP1]]
+//
+vuint32m4_t test_vamoaddei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) {
+  return vamoaddei8_v_u32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m8(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m8(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoadd.nxv16i32.nxv16i8.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP1]]
+//
+vuint32m8_t test_vamoaddei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) {
+  return vamoaddei8_v_u32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]]
+//
+//
CHECK-RV64-LABEL: @test_vamoaddei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei16_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei16_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei32_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei64_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m4( +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei8_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei8_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei8_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t 
value, size_t vl) { + return vamoaddei8_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei16_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei16_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei16_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i32.i32(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei32_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei32_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei32_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoadd.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei64_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei64_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei64_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei64_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return 
vamoaddei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoaddei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoaddei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m4_m( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoaddei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoaddei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoaddei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoaddei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoaddei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoaddei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// 
CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoaddei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoaddei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoaddei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoaddei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * 
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i32(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei32_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoaddei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoaddei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoaddei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoaddei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], 
[[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoaddei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoaddei64_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoaddei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoaddei64_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei8_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: 
ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamoaddei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32mf2_m( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei32_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei32_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoaddei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoaddei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoaddei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoaddei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoaddei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoaddei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoaddei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoaddei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoaddei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoaddei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: 
[[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoaddei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoaddei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoaddei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoaddei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoaddei16_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoaddei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoaddei16_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoaddei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoaddei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
<vscale x 8 x i64> @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoaddei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoaddei16_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoaddei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoaddei32_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoaddei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoaddei32_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoaddei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoaddei32_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei32_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei32_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoaddei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoaddei32_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoadd.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoaddei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoaddei64_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoadd.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoaddei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoaddei64_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoadd.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoaddei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoaddei64_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoaddei64_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoaddei64_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call
<vscale x 8 x i64> @llvm.riscv.vamoadd.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]] +// +vuint64m8_t test_vamoaddei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoaddei64_v_u64m8_m(mask, base, bindex, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoand.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoand.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoand.c @@ -0,0 +1,2250 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1 +// RUN: FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include <riscv_vector.h> + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoand.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret <vscale x 1 x i32> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoand.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP1]] +// +vint32mf2_t test_vamoandei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoand.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret <vscale x 2 x i32> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoand.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP1]] +// +vint32m1_t test_vamoandei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoandei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>* +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoand.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret <vscale x 4 x i32> [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoand.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> 
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoandei8_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoandei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoandei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei16_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m2( +// CHECK-RV32-NEXT: entry: 
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoandei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoandei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoandei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m1( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoandei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoandei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoandei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoandei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei64_v_i32mf2 (int32_t 
*base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoandei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoandei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoandei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoand.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoandei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoandei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoandei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoand.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoandei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoandei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoandei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoandei32_v_i64m2(base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoandei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoandei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoandei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoandei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoandei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoandei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei8_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei8_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// 
+vuint32m2_t test_vamoandei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei8_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei8_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei8_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei16_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei16_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei32_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) 
{ + return vamoandei64_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei8_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei8_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei8_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei8_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei16_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei16_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei16_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei32_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei32_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m4( +// CHECK-RV32-NEXT: 
entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei32_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei64_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei64_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m4( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei64_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei64_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoandei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoandei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoandei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoandei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoandei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoandei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoandei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoandei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], 
i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoandei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoandei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoandei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoandei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret 
[[TMP1]] +// +vint32m8_t test_vamoandei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoandei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoandei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoandei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoandei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoandei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoandei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoandei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t 
test_vamoandei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoandei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoandei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoandei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t 
bindex, vint64m8_t value, size_t vl) { + return vamoandei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoandei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoandei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return 
vamoandei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoandei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoandei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoandei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoandei32_v_i64m8_m(mask, base, bindex, value, 
vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoandei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoandei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoandei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoandei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoandei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoandei64_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoandei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoandei64_v_i64m8_m(mask, base, bindex, value, vl); +} +
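The masked variants take the mask as the first operand and, per the vamoand.mask codegen above, gate which elements perform the atomic memory operation. A minimal sketch mirroring test_vamoandei16_v_i64m1_m above; the function and variable names are illustrative only, not part of this patch:

#include <riscv_vector.h>

// Atomically AND keep[i] into the i64 word addressed by flags plus the
// byte offset off[i], but only where active[i] is set; masked-off elements
// perform no memory access. Active result elements hold the prior values.
vint64m1_t clear_flag_bits_masked(vbool64_t active, int64_t *flags,
                                  vuint16mf4_t off, vint64m1_t keep,
                                  size_t vl) {
  return vamoandei16_v_i64m1_m(active, flags, off, keep, vl);
}

+// CHECK-RV32-LABEL: 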
@test_vamoandei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei8_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: 
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] 
= bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei32_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] 
to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei32_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoandei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoandei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoandei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoandei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: 
[[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoandei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoandei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoandei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoandei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoandei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoandei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoandei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoandei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoandei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoandei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoandei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoandei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoandei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoandei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], 
<vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.mask.nxv2i64.nxv2i16.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoandei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoandei16_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.mask.nxv4i64.nxv4i16.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoandei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoandei16_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei16_v_u64m8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei16_v_u64m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.mask.nxv8i64.nxv8i16.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoandei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoandei16_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.mask.nxv1i64.nxv1i32.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoandei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoandei32_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.mask.nxv2i64.nxv2i32.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoandei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoandei32_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.mask.nxv4i64.nxv4i32.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoandei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoandei32_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei32_v_u64m8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei32_v_u64m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.mask.nxv8i64.nxv8i32.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoandei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoandei32_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m1_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i32(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m1_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoand.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoandei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoandei64_v_u64m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m2_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m2_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoand.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoandei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoandei64_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m4_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m4_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoand.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoandei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoandei64_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoandei64_v_u64m8_m(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoandei64_v_u64m8_m(
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoand.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoandei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoandei64_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vamomax.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamomax.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamomax.c
@@ -0,0 +1,2250 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
+// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1
+// RUN: FileCheck --check-prefix=ASM --allow-empty %s
+
+// ASM-NOT: warning
+#include <riscv_vector.h>
+
+// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32mf2(
+// CHECK-RV32-NEXT: entry:
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call <vscale x 1 x i32>
@llvm.riscv.vamomax.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei8_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomax.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei16_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei16_v_i32m4(base, bindex, value, vl); +} + +// 
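+// An illustrative sketch of the assumed Zvamo semantics (a hand-written
+// aside, not an autogenerated check): each vamomaxei<eew>_v_i32m<lmul> call
+// performs one atomic signed-max read-modify-write per element i < vl and
+// returns the values memory held beforehand, roughly:
+//
+//   vint32m4_t old = vamomaxei16_v_i32m4(base, bindex, value, vl);
+//   // per element i, atomically at the address p derived from base and
+//   // bindex[i]:  old_i = *p;  *p = (*p > value[i]) ? *p : value[i];
+//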
CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret 
[[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * 
+// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei32_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei32_v_i64m8(base, bindex, 
value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: 
ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei8_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei8_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei8_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei8_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) 
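+// Usage sketch (a hand-written aside, not an autogenerated check; "table",
+// "idx", and "v" are hypothetical names): the unsigned-max form composes the
+// same way, e.g. folding a vector of candidates into a shared maxima table:
+//
+//   vuint32m8_t old = vamomaxuei8_v_u32m8(table, idx, v, vl);
+//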
+// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei8_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei16_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei16_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m8( +// CHECK-RV32-NEXT: 
entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei32_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamomaxuei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei64_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei8_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei8_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei8_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei8_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei16_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei16_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m8( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei16_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei32_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei32_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t 
test_vamomaxuei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei32_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei64_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei64_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei64_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei64_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomax.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomax.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomax.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamomaxei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamomaxei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamomaxei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamomaxei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomax.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamomaxei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamomaxei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamomaxei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamomaxei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamomaxei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamomaxei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei32_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamomaxei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamomaxei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m2_m( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamomaxei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamomaxei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamomaxei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamomaxei64_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomax.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamomaxei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamomaxei64_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei8_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast 
i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei32_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei32_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamomaxuei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamomaxuei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamomaxuei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamomaxuei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamomaxuei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamomaxuei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamomaxuei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamomaxuei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamomaxuei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamomaxuei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei16_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: 
[[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei16_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei16_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei32_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei32_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = 
call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei32_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei32_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamomaxuei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamomaxuei64_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamomaxuei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamomaxuei64_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomaxu.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamomaxuei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamomaxuei64_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamomaxuei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamomaxuei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomaxu.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamomaxuei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamomaxuei64_v_u64m8_m(mask, base, bindex, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vamomin.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamomin.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamomin.c @@ -0,0 +1,2250 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1 +// RUN: FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include <riscv_vector.h> + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamominei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamominei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamominei8_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamominei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamominei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamominei16_v_i32mf2(base, bindex, value, vl); 
+} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamominei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamominei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamominei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamominei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret 
[[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamominei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamominei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamominei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamominei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamominei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamominei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamominei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamominei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamominei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamominei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamominei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamominei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamominei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamominei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamominei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamominei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamominei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return 
vamominei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamominei32_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamominei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamominei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamominei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 
[[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamominei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamominei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamominei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei8_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei8_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei8_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei8_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei8_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei16_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m1( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei16_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamominuei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei32_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 
[[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei64_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m1( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei8_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei8_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei8_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei8_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamominuei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei16_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei16_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei16_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: 
ret [[TMP1]] +// +vuint64m1_t test_vamominuei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei32_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei32_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei32_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei64_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei64_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei64_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamominuei64_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamominei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: 
ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamominei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamominei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamominei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamominei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32mf2_m( 
+// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamominei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamominei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamominei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamominei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamominei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamominei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamominei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamominei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast 
i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamominei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamominei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamominei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamominei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamominei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamominei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamominei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamominei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamominei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamominei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamominei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamominei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamominei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamomin.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamominei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamominei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamominei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamominei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamominei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamominei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamominei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamominei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 
[[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamominei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamominei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamominei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamominei32_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamominei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamominei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamominei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamominei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamomin.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// 
+vint64m4_t test_vamominei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) {
+  return vamominei64_v_i64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamominei64_v_i64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamomin.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominei64_v_i64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamomin.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vint64m8_t test_vamominei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) {
+  return vamominei64_v_i64m8_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamominuei8_v_u32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamominu.mask.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominuei8_v_u32mf2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamominu.mask.nxv1i32.nxv1i8.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vuint32mf2_t test_vamominuei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) {
+  return vamominuei8_v_u32mf2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m1_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamominu.mask.nxv2i32.nxv2i8.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m1_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamominu.mask.nxv2i32.nxv2i8.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vuint32m1_t test_vamominuei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) {
+  return vamominuei8_v_u32m1_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamominu.mask.nxv4i32.nxv4i8.i32(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 4 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamominu.mask.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vuint32m2_t test_vamominuei8_v_u32m2_m
(vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei16_v_u32m1_m (vbool32_t mask, 
uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei32_v_u32mf2_m (vbool64_t mask, 
uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei32_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei32_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamominuei32_v_u32m8_m (vbool4_t mask, uint32_t 
*base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamominuei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamominuei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamominuei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamominuei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamominuei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamominuei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamominuei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamominuei64_v_u32m4_m (vbool8_t mask, uint32_t *base, 
vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamominuei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, 
size_t vl) { + return vamominuei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei16_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei16_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + 
return vamominuei16_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei32_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei32_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei32_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return 
vamominuei32_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamominuei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamominuei64_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamominuei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamominuei64_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamominuei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamominuei64_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamominuei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamominuei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamominu.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamominuei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return 
vamominuei64_v_u64m8_m(mask, base, bindex, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoor.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoor.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoor.c @@ -0,0 +1,2250 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1 +// RUN: FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include <riscv_vector.h> + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoorei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei8_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m4( +//
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoorei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoorei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei16_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoorei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoorei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoorei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, 
size_t vl) { + return vamoorei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoorei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoorei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoorei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoorei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoorei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoorei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoorei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoorei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoorei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: 
[[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoorei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoorei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoorei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoorei32_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoor.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoorei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoorei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoorei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoorei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m8( 
+// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoorei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei8_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei8_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei8_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei8_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei8_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei16_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei16_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, 
size_t vl) { + return vamoorei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei32_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei64_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei8_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei8_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei8_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei8_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei16_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei16_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei16_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei32_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei32_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return 
vamoorei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei32_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei64_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei64_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei64_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) 
+// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei64_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoorei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoorei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoorei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoorei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoorei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoorei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoorei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoorei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoorei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoorei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoorei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoorei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoorei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoorei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoorei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoorei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoorei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoorei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoorei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoorei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoorei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoorei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoorei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei16_v_i64m1_m (vbool64_t mask, int64_t 
*base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoorei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoorei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoorei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return 
vamoorei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoorei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoorei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoorei32_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoorei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoorei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoorei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoorei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoorei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoorei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoorei64_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoorei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoorei64_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei8_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: 
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei32_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i32(* 
[[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei32_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoorei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoorei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoorei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoorei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoorei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoorei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], 
i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoorei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoorei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoorei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoorei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamoorei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei16_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei16_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei16_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei32_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei32_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * 
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei32_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei32_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoorei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoorei64_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoorei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoorei64_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoor.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoorei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoorei64_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoorei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoorei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoor.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoorei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoorei64_v_u64m8_m(mask, base, bindex, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoswap.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoswap.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoswap.c @@ -0,0 +1,3372 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -target-feature +experimental-zfh -target-feature +experimental-zvamo -disable-O0-optnone -fallow-half-arguments-and-returns -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -target-feature +experimental-zfh -target-feature +experimental-zvamo -disable-O0-optnone -fallow-half-arguments-and-returns -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +f -target-feature +d -target-feature +experimental-v \ +// RUN: -target-feature +experimental-zfh -target-feature +experimental-zvamo -fallow-half-arguments-and-returns -Werror -Wall -o - %s -S >/dev/null 2>&1 | FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include <riscv_vector.h> + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32*
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei8_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei8_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoswapei8_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei16_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei16_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, 
vint32m8_t value, size_t vl) { + return vamoswapei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoswapei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei8_v_i64m8(base, bindex, 
value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 
[[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei32_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i64(* [[TMP0]], 
[[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei8_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoswapei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei8_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei8_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei8_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoswapei8_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoswapei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei16_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei16_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 
[[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoswapei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei32_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m8( +// 
CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoswapei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei64_v_u32mf2 (uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei64_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoswapei8_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoswapei8_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoswapei8_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret 
[[TMP1]] +// +vuint64m8_t test_vamoswapei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoswapei8_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoswapei16_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoswapei16_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoswapei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoswapei16_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoswapei32_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoswapei32_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoswapei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoswapei32_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m1( +// 
CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 1 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vamoswap.nxv1i64.nxv1i64.i64(<vscale x 1 x i64>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x i64> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP1]]
+//
+vuint64m1_t test_vamoswapei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) {
+  return vamoswapei64_v_u64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoswap.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoswapei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoswapei64_v_u64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoswap.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoswapei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoswapei64_v_u64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoswap.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoswapei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoswapei64_v_u64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i8.i32(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i8.i64(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+vfloat32mf2_t test_vamoswapei8_v_f32mf2 (float *base, vuint8mf8_t bindex, vfloat32mf2_t value, size_t vl) {
+  return vamoswapei8_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i8.i32(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i8.i64(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+vfloat32m1_t test_vamoswapei8_v_f32m1 (float *base, vuint8mf4_t bindex, vfloat32m1_t value, size_t vl) {
+  return vamoswapei8_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i8.i32(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i8.i64(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+vfloat32m2_t test_vamoswapei8_v_f32m2 (float *base, vuint8mf2_t bindex, vfloat32m2_t value, size_t vl) {
+  return vamoswapei8_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i8.i32(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i8.i64(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+vfloat32m4_t test_vamoswapei8_v_f32m4 (float *base, vuint8m1_t bindex, vfloat32m4_t value, size_t vl) {
+  return vamoswapei8_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x float> @llvm.riscv.vamoswap.nxv16f32.nxv16i8.i32(<vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 16 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x float> @llvm.riscv.vamoswap.nxv16f32.nxv16i8.i64(<vscale x 16 x float>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP1]]
+//
+vfloat32m8_t test_vamoswapei8_v_f32m8 (float *base, vuint8m2_t bindex, vfloat32m8_t value, size_t vl) {
+  return vamoswapei8_v_f32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i16.i32(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i16.i64(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+vfloat32mf2_t test_vamoswapei16_v_f32mf2 (float *base, vuint16mf4_t bindex, vfloat32mf2_t value, size_t vl) {
+  return vamoswapei16_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i16.i32(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i16.i64(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+vfloat32m1_t test_vamoswapei16_v_f32m1 (float *base, vuint16mf2_t bindex, vfloat32m1_t value, size_t vl) {
+  return vamoswapei16_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i16.i32(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i16.i64(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+vfloat32m2_t test_vamoswapei16_v_f32m2 (float *base, vuint16m1_t bindex, vfloat32m2_t value, size_t vl) {
+  return vamoswapei16_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i16.i32(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i16.i64(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+vfloat32m4_t test_vamoswapei16_v_f32m4 (float *base, vuint16m2_t bindex, vfloat32m4_t value, size_t vl) {
+  return vamoswapei16_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x float> @llvm.riscv.vamoswap.nxv16f32.nxv16i16.i32(<vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 16 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x float> @llvm.riscv.vamoswap.nxv16f32.nxv16i16.i64(<vscale x 16 x float>* [[TMP0]], <vscale x 16 x i16> [[BINDEX:%.*]], <vscale x 16 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP1]]
+//
+vfloat32m8_t test_vamoswapei16_v_f32m8 (float *base, vuint16m4_t bindex, vfloat32m8_t value, size_t vl) {
+  return vamoswapei16_v_f32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i32.i32(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i32.i64(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+vfloat32mf2_t test_vamoswapei32_v_f32mf2 (float *base, vuint32mf2_t bindex, vfloat32mf2_t value, size_t vl) {
+  return vamoswapei32_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i32.i32(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i32.i64(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+vfloat32m1_t test_vamoswapei32_v_f32m1 (float *base, vuint32m1_t bindex, vfloat32m1_t value, size_t vl) {
+  return vamoswapei32_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i32.i32(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i32.i64(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+vfloat32m2_t test_vamoswapei32_v_f32m2 (float *base, vuint32m2_t bindex, vfloat32m2_t value, size_t vl) {
+  return vamoswapei32_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i32.i32(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i32.i64(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+vfloat32m4_t test_vamoswapei32_v_f32m4 (float *base, vuint32m4_t bindex, vfloat32m4_t value, size_t vl) {
+  return vamoswapei32_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x float> @llvm.riscv.vamoswap.nxv16f32.nxv16i32.i32(<vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 16 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 16 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x float> @llvm.riscv.vamoswap.nxv16f32.nxv16i32.i64(<vscale x 16 x float>* [[TMP0]], <vscale x 16 x i32> [[BINDEX:%.*]], <vscale x 16 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x float> [[TMP1]]
+//
+vfloat32m8_t test_vamoswapei32_v_f32m8 (float *base, vuint32m8_t bindex, vfloat32m8_t value, size_t vl) {
+  return vamoswapei32_v_f32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i64.i32(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 1 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x float> @llvm.riscv.vamoswap.nxv1f32.nxv1i64.i64(<vscale x 1 x float>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x float> [[TMP1]]
+//
+vfloat32mf2_t test_vamoswapei64_v_f32mf2 (float *base, vuint64m1_t bindex, vfloat32mf2_t value, size_t vl) {
+  return vamoswapei64_v_f32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i64.i32(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 2 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x float> @llvm.riscv.vamoswap.nxv2f32.nxv2i64.i64(<vscale x 2 x float>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x float> [[TMP1]]
+//
+vfloat32m1_t test_vamoswapei64_v_f32m1 (float *base, vuint64m2_t bindex, vfloat32m1_t value, size_t vl) {
+  return vamoswapei64_v_f32m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i64.i32(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 4 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x float> @llvm.riscv.vamoswap.nxv4f32.nxv4i64.i64(<vscale x 4 x float>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x float> [[TMP1]]
+//
+vfloat32m2_t test_vamoswapei64_v_f32m2 (float *base, vuint64m4_t bindex, vfloat32m2_t value, size_t vl) {
+  return vamoswapei64_v_f32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i64.i32(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to <vscale x 8 x float>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x float> @llvm.riscv.vamoswap.nxv8f32.nxv8i64.i64(<vscale x 8 x float>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x float> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x float> [[TMP1]]
+//
+vfloat32m4_t test_vamoswapei64_v_f32m4 (float *base, vuint64m8_t bindex, vfloat32m4_t value, size_t vl) {
+  return vamoswapei64_v_f32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i8.i32(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i8.i64(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+vfloat64m1_t test_vamoswapei8_v_f64m1 (double *base, vuint8mf8_t bindex, vfloat64m1_t value, size_t vl) {
+  return vamoswapei8_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i8.i32(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i8.i64(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i8> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+vfloat64m2_t test_vamoswapei8_v_f64m2 (double *base, vuint8mf4_t bindex, vfloat64m2_t value, size_t vl) {
+  return vamoswapei8_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i8.i32(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i8.i64(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+vfloat64m4_t test_vamoswapei8_v_f64m4 (double *base, vuint8mf2_t bindex, vfloat64m4_t value, size_t vl) {
+  return vamoswapei8_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i8.i32(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i8.i64(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+vfloat64m8_t test_vamoswapei8_v_f64m8 (double *base, vuint8m1_t bindex, vfloat64m8_t value, size_t vl) {
+  return vamoswapei8_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i16.i32(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i16.i64(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+vfloat64m1_t test_vamoswapei16_v_f64m1 (double *base, vuint16mf4_t bindex, vfloat64m1_t value, size_t vl) {
+  return vamoswapei16_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i16.i32(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i16.i64(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+vfloat64m2_t test_vamoswapei16_v_f64m2 (double *base, vuint16mf2_t bindex, vfloat64m2_t value, size_t vl) {
+  return vamoswapei16_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i16.i32(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i16.i64(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i16> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+vfloat64m4_t test_vamoswapei16_v_f64m4 (double *base, vuint16m1_t bindex, vfloat64m4_t value, size_t vl) {
+  return vamoswapei16_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i16.i32(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i16.i64(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i16> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+vfloat64m8_t test_vamoswapei16_v_f64m8 (double *base, vuint16m2_t bindex, vfloat64m8_t value, size_t vl) {
+  return vamoswapei16_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i32.i32(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i32.i64(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i32> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+vfloat64m1_t test_vamoswapei32_v_f64m1 (double *base, vuint32mf2_t bindex, vfloat64m1_t value, size_t vl) {
+  return vamoswapei32_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i32.i32(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i32.i64(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i32> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+vfloat64m2_t test_vamoswapei32_v_f64m2 (double *base, vuint32m1_t bindex, vfloat64m2_t value, size_t vl) {
+  return vamoswapei32_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i32.i32(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i32.i64(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i32> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+vfloat64m4_t test_vamoswapei32_v_f64m4 (double *base, vuint32m2_t bindex, vfloat64m4_t value, size_t vl) {
+  return vamoswapei32_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i32.i32(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i32.i64(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i32> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+vfloat64m8_t test_vamoswapei32_v_f64m8 (double *base, vuint32m4_t bindex, vfloat64m8_t value, size_t vl) {
+  return vamoswapei32_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i64.i32(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 1 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x double> @llvm.riscv.vamoswap.nxv1f64.nxv1i64.i64(<vscale x 1 x double>* [[TMP0]], <vscale x 1 x i64> [[BINDEX:%.*]], <vscale x 1 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP1]]
+//
+vfloat64m1_t test_vamoswapei64_v_f64m1 (double *base, vuint64m1_t bindex, vfloat64m1_t value, size_t vl) {
+  return vamoswapei64_v_f64m1(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i64.i32(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 2 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x double> @llvm.riscv.vamoswap.nxv2f64.nxv2i64.i64(<vscale x 2 x double>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP1]]
+//
+vfloat64m2_t test_vamoswapei64_v_f64m2 (double *base, vuint64m2_t bindex, vfloat64m2_t value, size_t vl) {
+  return vamoswapei64_v_f64m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i64.i32(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 4 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x double> @llvm.riscv.vamoswap.nxv4f64.nxv4i64.i64(<vscale x 4 x double>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP1]]
+//
+vfloat64m4_t test_vamoswapei64_v_f64m4 (double *base, vuint64m4_t bindex, vfloat64m4_t value, size_t vl) {
+  return vamoswapei64_v_f64m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i64.i32(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to <vscale x 8 x double>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x double> @llvm.riscv.vamoswap.nxv8f64.nxv8i64.i64(<vscale x 8 x double>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x double> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP1]]
+//
+vfloat64m8_t test_vamoswapei64_v_f64m8 (double *base, vuint64m8_t bindex, vfloat64m8_t value, size_t vl) {
+  return vamoswapei64_v_f64m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32mf2_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i8> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], <vscale x 1 x i1>
[[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoswapei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret 
[[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoswapei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// 
CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoswapei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoswapei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoswapei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoswapei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: 
@test_vamoswapei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoswapei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoswapei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoswapei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoswapei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoswapei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoswapei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m2_m( +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei32_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoswapei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoswapei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoswapei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoswapei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoswapei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoswapei64_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoswapei64_v_i64m8_m (vbool8_t mask, int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoswapei64_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei8_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoswapei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = 
call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoswapei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei32_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei32_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoswapei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoswapei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoswapei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoswapei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoswapei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoswapei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoswapei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoswapei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoswapei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoswapei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoswapei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoswapei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoswapei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoswapei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoswapei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoswapei16_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoswapei16_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoswapei16_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoswapei32_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoswapei32_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoswapei32_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoswapei32_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoswapei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoswapei64_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoswapei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoswapei64_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoswapei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoswapei64_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoswapei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoswapei64_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei8_v_f32mf2_m (vbool64_t mask, float *base, vuint8mf8_t bindex, vfloat32mf2_t value, size_t vl) { + return vamoswapei8_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei8_v_f32m1_m (vbool32_t mask, float *base, vuint8mf4_t bindex, vfloat32m1_t value, size_t vl) { + return vamoswapei8_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv4f32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei8_v_f32m2_m (vbool16_t mask, float *base, vuint8mf2_t bindex, vfloat32m2_t value, size_t vl) { + return vamoswapei8_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei8_v_f32m4_m (vbool8_t mask, float *base, vuint8m1_t bindex, vfloat32m4_t value, size_t vl) { + return vamoswapei8_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei8_v_f32m8_m (vbool4_t mask, float *base, vuint8m2_t bindex, vfloat32m8_t value, size_t vl) { + return vamoswapei8_v_f32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei16_v_f32mf2_m (vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2_t value, size_t vl) { + return vamoswapei16_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoswap.mask.nxv2f32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei16_v_f32m1_m (vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1_t value, size_t vl) { + return vamoswapei16_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei16_v_f32m2_m (vbool16_t mask, float *base, vuint16m1_t bindex, vfloat32m2_t value, size_t vl) { + return vamoswapei16_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei16_v_f32m4_m (vbool8_t mask, float *base, vuint16m2_t bindex, vfloat32m4_t value, size_t vl) { + return vamoswapei16_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei16_v_f32m8_m (vbool4_t mask, float *base, vuint16m4_t bindex, vfloat32m8_t value, size_t vl) { + return vamoswapei16_v_f32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = 
call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei32_v_f32mf2_m (vbool64_t mask, float *base, vuint32mf2_t bindex, vfloat32mf2_t value, size_t vl) { + return vamoswapei32_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei32_v_f32m1_m (vbool32_t mask, float *base, vuint32m1_t bindex, vfloat32m1_t value, size_t vl) { + return vamoswapei32_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei32_v_f32m2_m (vbool16_t mask, float *base, vuint32m2_t bindex, vfloat32m2_t value, size_t vl) { + return vamoswapei32_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei32_v_f32m4_m (vbool8_t mask, float *base, vuint32m4_t bindex, vfloat32m4_t value, size_t vl) { + return vamoswapei32_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: 
[[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv16f32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m8_t test_vamoswapei32_v_f32m8_m (vbool4_t mask, float *base, vuint32m8_t bindex, vfloat32m8_t value, size_t vl) { + return vamoswapei32_v_f32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32mf2_t test_vamoswapei64_v_f32mf2_m (vbool64_t mask, float *base, vuint64m1_t bindex, vfloat32mf2_t value, size_t vl) { + return vamoswapei64_v_f32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m1_t test_vamoswapei64_v_f32m1_m (vbool32_t mask, float *base, vuint64m2_t bindex, vfloat32m1_t value, size_t vl) { + return vamoswapei64_v_f32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m2_t test_vamoswapei64_v_f32m2_m (vbool16_t mask, float *base, vuint64m4_t bindex, vfloat32m2_t value, size_t vl) { + return vamoswapei64_v_f32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast float* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat32m4_t test_vamoswapei64_v_f32m4_m (vbool8_t mask, float *base, vuint64m8_t bindex, vfloat32m4_t value, size_t vl) { + return vamoswapei64_v_f32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei8_v_f64m1_m (vbool64_t mask, double *base, vuint8mf8_t bindex, vfloat64m1_t value, size_t vl) { + return vamoswapei8_v_f64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei8_v_f64m2_m (vbool32_t mask, double *base, vuint8mf4_t bindex, vfloat64m2_t value, size_t vl) { + return vamoswapei8_v_f64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei8_v_f64m4_m (vbool16_t mask, double *base, vuint8mf2_t bindex, vfloat64m4_t value, size_t vl) { + return vamoswapei8_v_f64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei8_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei8_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei8_v_f64m8_m (vbool8_t mask, double *base, vuint8m1_t bindex, vfloat64m8_t value, size_t vl) { + return vamoswapei8_v_f64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei16_v_f64m1_m (vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1_t value, size_t vl) { + return vamoswapei16_v_f64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei16_v_f64m2_m (vbool32_t mask, double *base, vuint16mf2_t bindex, vfloat64m2_t value, size_t vl) { + return vamoswapei16_v_f64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei16_v_f64m4_m (vbool16_t mask, double *base, vuint16m1_t bindex, vfloat64m4_t value, size_t vl) { + return vamoswapei16_v_f64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei16_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei16_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to 
* +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei16_v_f64m8_m (vbool8_t mask, double *base, vuint16m2_t bindex, vfloat64m8_t value, size_t vl) { + return vamoswapei16_v_f64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei32_v_f64m1_m (vbool64_t mask, double *base, vuint32mf2_t bindex, vfloat64m1_t value, size_t vl) { + return vamoswapei32_v_f64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei32_v_f64m2_m (vbool32_t mask, double *base, vuint32m1_t bindex, vfloat64m2_t value, size_t vl) { + return vamoswapei32_v_f64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei32_v_f64m4_m (vbool16_t mask, double *base, vuint32m2_t bindex, vfloat64m4_t value, size_t vl) { + return vamoswapei32_v_f64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei32_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei32_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* 
[[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei32_v_f64m8_m (vbool8_t mask, double *base, vuint32m4_t bindex, vfloat64m8_t value, size_t vl) { + return vamoswapei32_v_f64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv1f64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m1_t test_vamoswapei64_v_f64m1_m (vbool64_t mask, double *base, vuint64m1_t bindex, vfloat64m1_t value, size_t vl) { + return vamoswapei64_v_f64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv2f64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m2_t test_vamoswapei64_v_f64m2_m (vbool32_t mask, double *base, vuint64m2_t bindex, vfloat64m2_t value, size_t vl) { + return vamoswapei64_v_f64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv4f64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m4_t test_vamoswapei64_v_f64m4_m (vbool16_t mask, double *base, vuint64m4_t bindex, vfloat64m4_t value, size_t vl) { + return vamoswapei64_v_f64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoswapei64_v_f64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast double* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoswapei64_v_f64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast 
double* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoswap.mask.nxv8f64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vfloat64m8_t test_vamoswapei64_v_f64m8_m (vbool8_t mask, double *base, vuint64m8_t bindex, vfloat64m8_t value, size_t vl) { + return vamoswapei64_v_f64m8_m(mask, base, bindex, value, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoxor.c b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoxor.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics/vamoxor.c @@ -0,0 +1,2250 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv32 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV32 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s +// RUN: %clang_cc1 -triple riscv64 -target-feature +m -target-feature +experimental-v -target-feature +experimental-zvamo -Werror -Wall -o - %s -S >/dev/null 2>&1 +// RUN: FileCheck --check-prefix=ASM --allow-empty %s + +// ASM-NOT: warning +#include + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei8_v_i32mf2 (int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei8_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei8_v_i32m1 (int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei8_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: 
[[TMP1:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i64(<vscale x 4 x i32>* [[TMP0]], <vscale x 4 x i8> [[BINDEX:%.*]], <vscale x 4 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP1]]
+//
+vint32m2_t test_vamoxorei8_v_i32m2 (int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) {
+  return vamoxorei8_v_i32m2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m4(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i32(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m4(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 8 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i64(<vscale x 8 x i32>* [[TMP0]], <vscale x 8 x i8> [[BINDEX:%.*]], <vscale x 8 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP1]]
+//
+vint32m4_t test_vamoxorei8_v_i32m4 (int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) {
+  return vamoxorei8_v_i32m4(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m8(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i32(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m8(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 16 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i64(<vscale x 16 x i32>* [[TMP0]], <vscale x 16 x i8> [[BINDEX:%.*]], <vscale x 16 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP1]]
+//
+vint32m8_t test_vamoxorei8_v_i32m8 (int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) {
+  return vamoxorei8_v_i32m8(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32mf2(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i32(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32mf2(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 1 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i64(<vscale x 1 x i32>* [[TMP0]], <vscale x 1 x i16> [[BINDEX:%.*]], <vscale x 1 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP1]]
+//
+vint32mf2_t test_vamoxorei16_v_i32mf2 (int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) {
+  return vamoxorei16_v_i32mf2(base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m1(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i32(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m1(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to <vscale x 2 x i32>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i64(<vscale x 2 x i32>* [[TMP0]], <vscale x 2 x i16> [[BINDEX:%.*]], <vscale x 2 x i32> [[VALUE:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP1]]
+//
+vint32m1_t test_vamoxorei16_v_i32m1 (int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) {
+  return vamoxorei16_v_i32m1(base, bindex, value, vl);
+}
+
+//
CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei16_v_i32m2 (int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei16_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei16_v_i32m4 (int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei16_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei16_v_i32m8 (int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei16_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei32_v_i32mf2 (int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei32_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret 
[[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei32_v_i32m1 (int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei32_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei32_v_i32m2 (int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei32_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei32_v_i32m4 (int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei32_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei32_v_i32m8 (int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei32_v_i32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei64_v_i32mf2 (int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei64_v_i32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei64_v_i32m1 (int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei64_v_i32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei64_v_i32m2 (int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei64_v_i32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei64_v_i32m4 (int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei64_v_i32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei8_v_i64m1 (int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei8_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* 
[[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei8_v_i64m2 (int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei8_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei8_v_i64m4 (int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei8_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei8_v_i64m8 (int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei8_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei16_v_i64m1 (int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei16_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * 
+// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei16_v_i64m2 (int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei16_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei16_v_i64m4 (int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei16_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei16_v_i64m8 (int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei16_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei32_v_i64m1 (int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei32_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei32_v_i64m2 (int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei32_v_i64m2(base, 
bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei32_v_i64m4 (int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei32_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei32_v_i64m8 (int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei32_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei64_v_i64m1 (int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei64_v_i64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei64_v_i64m2 (int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei64_v_i64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// 
CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei64_v_i64m4 (int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei64_v_i64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei64_v_i64m8 (int64_t *base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei64_v_i64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei8_v_u32mf2 (uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei8_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei8_v_u32m1 (uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei8_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei8_v_u32m2 (uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei8_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei8_v_u32m4 (uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei8_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei8_v_u32m8 (uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei8_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei16_v_u32mf2 (uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei16_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei16_v_u32m1 (uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei16_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei16_v_u32m2 (uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei16_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei16_v_u32m4 (uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei16_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei16_v_u32m8 (uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei16_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei32_v_u32mf2 (uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei32_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m1( +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei32_v_u32m1 (uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei32_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei32_v_u32m2 (uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei32_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei32_v_u32m4 (uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei32_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei32_v_u32m8 (uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei32_v_u32m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32mf2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32mf2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei64_v_u32mf2 (uint32_t *base, 
vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei64_v_u32mf2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei64_v_u32m1 (uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei64_v_u32m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei64_v_u32m2 (uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei64_v_u32m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei64_v_u32m4 (uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei64_v_u32m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei8_v_u64m1 (uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei8_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoxor.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei8_v_u64m2 (uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei8_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei8_v_u64m4 (uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei8_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoxorei8_v_u64m8 (uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoxorei8_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei16_v_u64m1 (uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei16_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoxor.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei16_v_u64m2 (uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei16_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei16_v_u64m4 (uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei16_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoxorei16_v_u64m8 (uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoxorei16_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei32_v_u64m1 (uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei32_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei32_v_u64m2 (uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei32_v_u64m2(base, bindex, value, vl); +} + +// 
CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei32_v_u64m4 (uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei32_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoxorei32_v_u64m8 (uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoxorei32_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m1( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m1( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei64_v_u64m1 (uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei64_v_u64m1(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m2( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m2( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei64_v_u64m2 (uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei64_v_u64m2(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m4( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret 
[[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m4( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei64_v_u64m4 (uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei64_v_u64m4(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m8( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m8( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoxorei64_v_u64m8 (uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) { + return vamoxorei64_v_u64m8(base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei8_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint8mf8_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei8_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei8_v_i32m1_m (vbool32_t mask, int32_t *base, vuint8mf4_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei8_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// 
CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei8_v_i32m2_m (vbool16_t mask, int32_t *base, vuint8mf2_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei8_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei8_v_i32m4_m (vbool8_t mask, int32_t *base, vuint8m1_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei8_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei8_v_i32m8_m (vbool4_t mask, int32_t *base, vuint8m2_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei8_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei16_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei16_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei16_v_i32m1_m (vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei16_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei16_v_i32m2_m (vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei16_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei16_v_i32m4_m (vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei16_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei16_v_i32m8_m (vbool4_t mask, int32_t *base, vuint16m4_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei16_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei32_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint32mf2_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei32_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei32_v_i32m1_m (vbool32_t mask, int32_t *base, vuint32m1_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei32_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei32_v_i32m2_m (vbool16_t mask, int32_t *base, vuint32m2_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei32_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei32_v_i32m4_m (vbool8_t mask, int32_t *base, vuint32m4_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei32_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m8_t test_vamoxorei32_v_i32m8_m (vbool4_t mask, int32_t *base, vuint32m8_t bindex, vint32m8_t value, size_t vl) { + return vamoxorei32_v_i32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32mf2_t test_vamoxorei64_v_i32mf2_m (vbool64_t mask, int32_t *base, vuint64m1_t bindex, vint32mf2_t value, size_t vl) { + return vamoxorei64_v_i32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m1_t test_vamoxorei64_v_i32m1_m (vbool32_t mask, int32_t *base, vuint64m2_t bindex, vint32m1_t value, size_t vl) { + return vamoxorei64_v_i32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m2_t test_vamoxorei64_v_i32m2_m (vbool16_t mask, int32_t *base, vuint64m4_t bindex, vint32m2_t value, size_t vl) { + return vamoxorei64_v_i32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call 
@llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint32m4_t test_vamoxorei64_v_i32m4_m (vbool8_t mask, int32_t *base, vuint64m8_t bindex, vint32m4_t value, size_t vl) { + return vamoxorei64_v_i32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei8_v_i64m1_m (vbool64_t mask, int64_t *base, vuint8mf8_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei8_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei8_v_i64m2_m (vbool32_t mask, int64_t *base, vuint8mf4_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei8_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei8_v_i64m4_m (vbool16_t mask, int64_t *base, vuint8mf2_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei8_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], 
[[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei8_v_i64m8_m (vbool8_t mask, int64_t *base, vuint8m1_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei8_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei16_v_i64m1_m (vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei16_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei16_v_i64m2_m (vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei16_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei16_v_i64m4_m (vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei16_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// 
CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei16_v_i64m8_m (vbool8_t mask, int64_t *base, vuint16m2_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei16_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei32_v_i64m1_m (vbool64_t mask, int64_t *base, vuint32mf2_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei32_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei32_v_i64m2_m (vbool32_t mask, int64_t *base, vuint32m1_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei32_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei32_v_i64m4_m (vbool16_t mask, int64_t *base, vuint32m2_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei32_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t 
test_vamoxorei32_v_i64m8_m (vbool8_t mask, int64_t *base, vuint32m4_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei32_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m1_t test_vamoxorei64_v_i64m1_m (vbool64_t mask, int64_t *base, vuint64m1_t bindex, vint64m1_t value, size_t vl) { + return vamoxorei64_v_i64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m2_t test_vamoxorei64_v_i64m2_m (vbool32_t mask, int64_t *base, vuint64m2_t bindex, vint64m2_t value, size_t vl) { + return vamoxorei64_v_i64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m4_t test_vamoxorei64_v_i64m4_m (vbool16_t mask, int64_t *base, vuint64m4_t bindex, vint64m4_t value, size_t vl) { + return vamoxorei64_v_i64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_i64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_i64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vint64m8_t test_vamoxorei64_v_i64m8_m (vbool8_t mask, int64_t 
*base, vuint64m8_t bindex, vint64m8_t value, size_t vl) { + return vamoxorei64_v_i64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei8_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint8mf8_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei8_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei8_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint8mf4_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei8_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei8_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint8mf2_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei8_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei8_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint8m1_t bindex, vuint32m4_t value, size_t vl) { 
+ return vamoxorei8_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei8_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint8m2_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei8_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei16_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei16_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei16_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei16_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei16_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2_t value, size_t vl) { + return 
vamoxorei16_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei16_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei16_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei16_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint16m4_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei16_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei32_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint32mf2_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei32_v_u32mf2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei32_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint32m1_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei32_v_u32m1_m(mask, 
base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei32_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint32m2_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei32_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei32_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint32m4_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei32_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u32m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u32m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv16i32.nxv16i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m8_t test_vamoxorei32_v_u32m8_m (vbool4_t mask, uint32_t *base, vuint32m8_t bindex, vuint32m8_t value, size_t vl) { + return vamoxorei32_v_u32m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32mf2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32mf2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i32.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32mf2_t test_vamoxorei64_v_u32mf2_m (vbool64_t mask, uint32_t *base, vuint64m1_t bindex, vuint32mf2_t value, size_t vl) { + return vamoxorei64_v_u32mf2_m(mask, base, bindex, value, vl); +} + 
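+// Note: in vamo<op>ei<eew>_v_<type>_m, bindex carries the <eew>-wide unsigned
+// offsets, value and the result use the <type> element type, and the mask is
+// vboolN_t with N = SEW/LMUL (so u32mf2 pairs with vbool64_t and u64m8 with
+// vbool8_t).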
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i32.nxv2i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m1_t test_vamoxorei64_v_u32m1_m (vbool32_t mask, uint32_t *base, vuint64m2_t bindex, vuint32m1_t value, size_t vl) { + return vamoxorei64_v_u32m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i32.nxv4i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m2_t test_vamoxorei64_v_u32m2_m (vbool16_t mask, uint32_t *base, vuint64m4_t bindex, vuint32m2_t value, size_t vl) { + return vamoxorei64_v_u32m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u32m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u32m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i32* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i32.nxv8i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint32m4_t test_vamoxorei64_v_u32m4_m (vbool8_t mask, uint32_t *base, vuint64m8_t bindex, vuint32m4_t value, size_t vl) { + return vamoxorei64_v_u32m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei8_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint8mf8_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei8_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: 
@test_vamoxorei8_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei8_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint8mf4_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei8_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei8_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint8mf2_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei8_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei8_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei8_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i8.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoxorei8_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint8m1_t bindex, vuint64m8_t value, size_t vl) { + return vamoxorei8_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei16_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei16_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m2_m( +// CHECK-RV32-NEXT: entry: 
+// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei16_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei16_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei16_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei16_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei16_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei16_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i16.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoxorei16_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint16m2_t bindex, vuint64m8_t value, size_t vl) { + return vamoxorei16_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei32_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint32mf2_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei32_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = 
bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m2_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m2_t test_vamoxorei32_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint32m1_t bindex, vuint64m2_t value, size_t vl) { + return vamoxorei32_v_u64m2_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m4_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m4_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m4_t test_vamoxorei32_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint32m2_t bindex, vuint64m4_t value, size_t vl) { + return vamoxorei32_v_u64m4_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei32_v_u64m8_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei32_v_u64m8_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i32.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m8_t test_vamoxorei32_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint32m4_t bindex, vuint64m8_t value, size_t vl) { + return vamoxorei32_v_u64m8_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m1_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV32-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i32(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i32 [[VL:%.*]]) +// CHECK-RV32-NEXT: ret [[TMP1]] +// +// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m1_m( +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// CHECK-RV64-NEXT: [[TMP1:%.*]] = call @llvm.riscv.vamoxor.mask.nxv1i64.nxv1i64.i64(* [[TMP0]], [[BINDEX:%.*]], [[VALUE:%.*]], [[MASK:%.*]], i64 [[VL:%.*]]) +// CHECK-RV64-NEXT: ret [[TMP1]] +// +vuint64m1_t test_vamoxorei64_v_u64m1_m (vbool64_t mask, uint64_t *base, vuint64m1_t bindex, vuint64m1_t value, size_t vl) { + return vamoxorei64_v_u64m1_m(mask, base, bindex, value, vl); +} + +// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m2_m( +// CHECK-RV32-NEXT: entry: +// CHECK-RV32-NEXT: [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to * +// 
CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i32(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m2_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 2 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vamoxor.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64>* [[TMP0]], <vscale x 2 x i64> [[BINDEX:%.*]], <vscale x 2 x i64> [[VALUE:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP1]]
+//
+vuint64m2_t test_vamoxorei64_v_u64m2_m (vbool32_t mask, uint64_t *base, vuint64m2_t bindex, vuint64m2_t value, size_t vl) {
+  return vamoxorei64_v_u64m2_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m4_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i32(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m4_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 4 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vamoxor.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64>* [[TMP0]], <vscale x 4 x i64> [[BINDEX:%.*]], <vscale x 4 x i64> [[VALUE:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP1]]
+//
+vuint64m4_t test_vamoxorei64_v_u64m4_m (vbool16_t mask, uint64_t *base, vuint64m4_t bindex, vuint64m4_t value, size_t vl) {
+  return vamoxorei64_v_u64m4_m(mask, base, bindex, value, vl);
+}
+
+// CHECK-RV32-LABEL: @test_vamoxorei64_v_u64m8_m(
+// CHECK-RV32-NEXT:  entry:
+// CHECK-RV32-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV32-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i32(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i32 [[VL:%.*]])
+// CHECK-RV32-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+// CHECK-RV64-LABEL: @test_vamoxorei64_v_u64m8_m(
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = bitcast i64* [[BASE:%.*]] to <vscale x 8 x i64>*
+// CHECK-RV64-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vamoxor.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64>* [[TMP0]], <vscale x 8 x i64> [[BINDEX:%.*]], <vscale x 8 x i64> [[VALUE:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP1]]
+//
+vuint64m8_t test_vamoxorei64_v_u64m8_m (vbool8_t mask, uint64_t *base, vuint64m8_t bindex, vuint64m8_t value, size_t vl) {
+  return vamoxorei64_v_u64m8_m(mask, base, bindex, value, vl);
+}
+
diff --git a/clang/utils/TableGen/RISCVVEmitter.cpp b/clang/utils/TableGen/RISCVVEmitter.cpp
--- a/clang/utils/TableGen/RISCVVEmitter.cpp
+++ b/clang/utils/TableGen/RISCVVEmitter.cpp
@@ -139,7 +139,8 @@
   Basic = 0,
   F = 1 << 1,
   D = 1 << 2,
-  Zfh = 1 << 3
+  Zfh = 1 << 3,
+  Zvamo = 1 << 4,
 };

 // TODO refactor RVVIntrinsic class design after support all intrinsic
@@ -174,7 +175,8 @@
                bool HasMaskedOffOperand, bool HasVL, bool HasNoMaskedOverloaded,
                bool HasAutoDef, StringRef ManualCodegen, const RVVTypes &Types,
                const std::vector<int64_t> &IntrinsicTypes,
-               const std::vector<int64_t> &PermuteOperands);
+               const std::vector<int64_t> &PermuteOperands,
+               StringRef RequiredExtension);
   ~RVVIntrinsic() = default;

   StringRef getName() const { return Name; }
@@ -716,7 +718,8 @@
     bool HasNoMaskedOverloaded, bool HasAutoDef, StringRef ManualCodegen,
     const RVVTypes &OutInTypes, const std::vector<int64_t> &NewIntrinsicTypes,
-    const std::vector<int64_t> &PermuteOperands)
+    const std::vector<int64_t> &PermuteOperands,
+    StringRef RequiredExtension)
     : IRName(IRName), HasSideEffects(HasSideEffects), IsMask(IsMask),
       HasMaskedOffOperand(HasMaskedOffOperand), HasVL(HasVL),
       HasNoMaskedOverloaded(HasNoMaskedOverloaded), HasAutoDef(HasAutoDef),
@@ -742,6 +745,8 @@
     else if (T->isFloatVector(64))
       RISCVExtensions |= RISCVExtension::D;
   }
+  if (RequiredExtension == "Zvamo")
+    RISCVExtensions |= RISCVExtension::Zvamo;

   // Init OutputType and InputTypes
   OutputType = OutInTypes[0];
@@ -1073,6 +1078,7 @@
       R->getValueAsListOfInts("IntrinsicTypes");
   std::vector<int64_t> PermuteOperands =
       R->getValueAsListOfInts("PermuteOperands");
+  StringRef RequiredExtension = R->getValueAsString("RequiredExtension");
   StringRef IRName = R->getValueAsString("IRName");
   StringRef IRNameMask = R->getValueAsString("IRNameMask");
@@ -1117,7 +1123,7 @@
         Name, SuffixStr, MangledName, IRName, HasSideEffects,
         /*IsMask=*/false, /*HasMaskedOffOperand=*/false, HasVL,
         HasNoMaskedOverloaded, HasAutoDef, ManualCodegen, Types.getValue(),
-        IntrinsicTypes, PermuteOperands));
+        IntrinsicTypes, PermuteOperands, RequiredExtension));
     if (HasMask) {
       // Create a mask intrinsic
       Optional<RVVTypes> MaskTypes =
@@ -1126,7 +1132,8 @@
           Name, SuffixStr, MangledName, IRNameMask, HasSideEffects,
           /*IsMask=*/true, HasMaskedOffOperand, HasVL,
           HasNoMaskedOverloaded, HasAutoDef, ManualCodegenMask,
-          MaskTypes.getValue(), IntrinsicTypes, PermuteOperands));
+          MaskTypes.getValue(), IntrinsicTypes, PermuteOperands,
+          RequiredExtension));
     }
   } // end for Log2LMULList
 } // end for TypeRange
@@ -1199,6 +1206,8 @@
     OS << LS << "defined(__riscv_d)";
   if (Extents & RISCVExtension::Zfh)
     OS << LS << "defined(__riscv_zfh)";
+  if (Extents & RISCVExtension::Zvamo)
+    OS << LS << "defined(__riscv_zvamo)";
   OS << "\n";
   return true;
 }
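
A minimal usage sketch, not part of the patch: with the emitter change above, the generated riscv_vector.h guards these intrinsics behind __riscv_zvamo, so code can feature-test before calling them. The wrapper name below is illustrative; the intrinsic, argument order, and types follow the tests in this patch, and the return value is assumed to be the prior memory contents per the vamo semantics.

#include <riscv_vector.h>

#if defined(__riscv_zvamo)
/* Atomically XOR value[i] into base[bindex[i]] for the first vl elements;
   the returned vector holds the old memory values. */
vuint32m1_t scatter_xor_fetch(uint32_t *base, vuint8mf4_t bindex,
                              vuint32m1_t value, size_t vl) {
  return vamoxorei8_v_u32m1(base, bindex, value, vl);
}
#endif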