diff --git a/llvm/include/llvm/IR/IntrinsicsRISCV.td b/llvm/include/llvm/IR/IntrinsicsRISCV.td
--- a/llvm/include/llvm/IR/IntrinsicsRISCV.td
+++ b/llvm/include/llvm/IR/IntrinsicsRISCV.td
@@ -231,8 +231,22 @@
                     [LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyint_ty],
                     [IntrNoMem]>, RISCVVIntrinsic;
   // For destination vector type is the same as first and second source vector.
-  // Input: (vector_in, vector_in, vl)
-  class RISCVBinaryAAAMask
+  // Input: (vector_in, int_vector_in, vl)
+  class RISCVRGatherVVNoMask
+        : Intrinsic<[llvm_anyvector_ty],
+                    [LLVMMatchType<0>, LLVMVectorOfBitcastsToInt<0>, llvm_anyint_ty],
+                    [IntrNoMem]>, RISCVVIntrinsic;
+  // For destination vector type is the same as first and second source vector.
+  // Input: (maskedoff, vector_in, int_vector_in, mask, vl)
+  class RISCVRGatherVVMask
+        : Intrinsic<[llvm_anyvector_ty],
+                    [LLVMMatchType<0>, LLVMMatchType<0>, LLVMVectorOfBitcastsToInt<0>,
+                     LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
+                    [IntrNoMem]>, RISCVVIntrinsic;
+  // For destination vector type is the same as first and second source vector,
+  // with a mask as the third operand.
+  // Input: (vector_in, vector_in, mask, vl)
+  class RISCVBinaryAAM
         : Intrinsic<[llvm_anyvector_ty],
                     [LLVMMatchType<0>, LLVMMatchType<0>,
                      LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
@@ -254,6 +268,21 @@
                     [IntrNoMem]>, RISCVVIntrinsic {
     let ExtendOperand = 3;
   }
+  // For destination vector type is the same as first source vector.
+  // Input: (vector_in, xlen_in, vl)
+  class RISCVGatherVXNoMask
+        : Intrinsic<[llvm_anyvector_ty],
+                    [LLVMMatchType<0>, llvm_anyint_ty, LLVMMatchType<1>],
+                    [IntrNoMem]>, RISCVVIntrinsic {
+  }
+  // For destination vector type is the same as first source vector (with mask).
+  // Input: (maskedoff, vector_in, xlen_in, mask, vl)
+  class RISCVGatherVXMask
+        : Intrinsic<[llvm_anyvector_ty],
+                    [LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyint_ty,
+                     LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMMatchType<1>],
+                    [IntrNoMem]>, RISCVVIntrinsic {
+  }
   // For destination vector type is NOT the same as first source vector.
   // Input: (vector_in, vector_in/scalar_in, vl)
   class RISCVBinaryABXNoMask
@@ -688,6 +717,14 @@
     def "int_riscv_" # NAME : RISCVBinaryAAXNoMask;
     def "int_riscv_" # NAME # "_mask" : RISCVBinaryAAXMask;
   }
+  multiclass RISCVRGatherVV {
+    def "int_riscv_" # NAME : RISCVRGatherVVNoMask;
+    def "int_riscv_" # NAME # "_mask" : RISCVRGatherVVMask;
+  }
+  multiclass RISCVRGatherVX {
+    def "int_riscv_" # NAME : RISCVGatherVXNoMask;
+    def "int_riscv_" # NAME # "_mask" : RISCVGatherVXMask;
+  }
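In IR terms, each of the two multiclasses above instantiates one unmasked and one masked intrinsic per defm. A minimal sketch of the two unmasked declarations for a single type combination, assuming nxv1i8 with an i32 xlen/vl type as in the rv32 tests below (the operand comments are editorial):

; vrgather.vv: the indices come from an integer vector with the same
; element count as the source (LLVMVectorOfBitcastsToInt<0>).
declare <vscale x 1 x i8> @llvm.riscv.vrgather.vv.nxv1i8.i32(
  <vscale x 1 x i8>,  ; source vector
  <vscale x 1 x i8>,  ; index vector
  i32)                ; vl

; vrgather.vx: one XLEN-sized scalar index is applied to every element;
; the vl operand shares the scalar's type via LLVMMatchType<1>.
declare <vscale x 1 x i8> @llvm.riscv.vrgather.vx.nxv1i8.i32(
  <vscale x 1 x i8>,  ; source vector
  i32,                ; scalar index
  i32)                ; vl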
  // ABX means the destination type(A) is different from the first source
  // type(B). X means any type for the second source operand.
  multiclass RISCVBinaryABX {
@@ -965,10 +1002,11 @@
   defm vfslide1up : RISCVBinaryAAX;
   defm vfslide1down : RISCVBinaryAAX;
-  defm vrgather : RISCVBinaryAAX;
+  defm vrgather_vv : RISCVRGatherVV;
+  defm vrgather_vx : RISCVRGatherVX;
   defm vrgatherei16 : RISCVBinaryAAX;
-  def "int_riscv_vcompress" : RISCVBinaryAAAMask;
+  def "int_riscv_vcompress" : RISCVBinaryAAM;
   defm vaaddu : RISCVSaturatingBinaryAAX;
   defm vaadd : RISCVSaturatingBinaryAAX;
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td b/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td
@@ -2933,9 +2933,9 @@
 multiclass VPatBinaryV_VV_VX_VI_INT<string intrinsic, string instruction,
                                     list<VTypeInfo> vtilist, Operand ImmType = simm5>
 {
-  defm "" : VPatBinaryV_VV_INT<intrinsic, instruction, vtilist>;
-  defm "" : VPatBinaryV_VX_INT<intrinsic, instruction, vtilist>;
-  defm "" : VPatBinaryV_VI<intrinsic, instruction, vtilist, ImmType>;
+  defm "" : VPatBinaryV_VV_INT<intrinsic#"_vv", instruction, vtilist>;
+  defm "" : VPatBinaryV_VX_INT<intrinsic#"_vx", instruction, vtilist>;
+  defm "" : VPatBinaryV_VI<intrinsic#"_vx", instruction, vtilist, ImmType>;
 }
 multiclass VPatReductionV_VS<string intrinsic, string instruction, bit IsFloat = 0> {
diff --git a/llvm/test/CodeGen/RISCV/rvv/vrgather-rv32.ll b/llvm/test/CodeGen/RISCV/rvv/vrgather-rv32.ll
--- a/llvm/test/CodeGen/RISCV/rvv/vrgather-rv32.ll
+++ b/llvm/test/CodeGen/RISCV/rvv/vrgather-rv32.ll
@@ -1,7 +1,7 @@ ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ; RUN: llc -mtriple=riscv32 -mattr=+experimental-v,+d,+experimental-zfh -verify-machineinstrs \ ; RUN: --riscv-no-aliases < %s | FileCheck %s -declare @llvm.riscv.vrgather.nxv1i8.nxv1i8( +declare @llvm.riscv.vrgather.vv.nxv1i8.i32( , , i32); @@ -14,7 +14,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vrgather.vv.nxv1i8.i32( %0, %1, i32 %2) @@ -22,7 +22,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vrgather.vv.mask.nxv1i8.i32( , , , @@ -36,7 +36,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1i8.i32( %0, %1, %2, @@ -46,7 +46,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i8.nxv2i8( +declare @llvm.riscv.vrgather.vv.nxv2i8.i32( , , i32); @@ -59,7 +59,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vrgather.vv.nxv2i8.i32( %0, %1, i32 %2) @@ -67,7 +67,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i8.nxv2i8( +declare @llvm.riscv.vrgather.vv.mask.nxv2i8.i32( , , , @@ -81,7 +81,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2i8.i32( %0, %1, %2, @@ -91,7 +91,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i8.nxv4i8( +declare @llvm.riscv.vrgather.vv.nxv4i8.i32( , , i32); @@ -104,7 +104,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vrgather.vv.nxv4i8.i32( %0, %1, i32 %2) @@ -112,7 +112,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i8.nxv4i8( +declare @llvm.riscv.vrgather.vv.mask.nxv4i8.i32( , , , @@ -126,7 +126,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4i8.i32( %0, %1, %2, @@ -136,7 +136,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i8.nxv8i8( +declare @llvm.riscv.vrgather.vv.nxv8i8.i32( , , i32); @@ -149,7 +149,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vrgather.vv.nxv8i8.i32( %0, %1, i32 %2) @@ -157,7 +157,7 @@ ret %a }
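The masked tests that follow all share the operand order fixed by RISCVRGatherVVMask above: the merge value first, then the gather operands, then the mask and vl. A sketch of one such call, with editorial names in place of the tests' %0..%4:

  %a = call <vscale x 1 x i8> @llvm.riscv.vrgather.vv.mask.nxv1i8.i32(
    <vscale x 1 x i8> %maskedoff, ; provides the inactive lanes of the result
    <vscale x 1 x i8> %src,       ; gather source
    <vscale x 1 x i8> %index,     ; integer index vector
    <vscale x 1 x i1> %mask,      ; v0
    i32 %vl)

which llc selects to the checked form vrgather.vv v8, v9, v10, v0.t.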
-declare @llvm.riscv.vrgather.mask.nxv8i8.nxv8i8( +declare @llvm.riscv.vrgather.vv.mask.nxv8i8.i32( , , , @@ -171,7 +171,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8i8.i32( %0, %1, %2, @@ -181,7 +181,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i8.nxv16i8( +declare @llvm.riscv.vrgather.vv.nxv16i8.i32( , , i32); @@ -194,7 +194,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vrgather.vv.nxv16i8.i32( %0, %1, i32 %2) @@ -202,7 +202,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i8.nxv16i8( +declare @llvm.riscv.vrgather.vv.mask.nxv16i8.i32( , , , @@ -216,7 +216,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16i8.i32( %0, %1, %2, @@ -226,7 +226,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i8.nxv32i8( +declare @llvm.riscv.vrgather.vv.nxv32i8.i32( , , i32); @@ -239,7 +239,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vrgather.vv.nxv32i8.i32( %0, %1, i32 %2) @@ -247,7 +247,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i8.nxv32i8( +declare @llvm.riscv.vrgather.vv.mask.nxv32i8.i32( , , , @@ -261,7 +261,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv32i8.i32( %0, %1, %2, @@ -271,7 +271,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv64i8.nxv64i8( +declare @llvm.riscv.vrgather.vv.nxv64i8.i32( , , i32); @@ -284,7 +284,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vrgather.vv.nxv64i8.i32( %0, %1, i32 %2) @@ -292,7 +292,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv64i8.nxv64i8( +declare @llvm.riscv.vrgather.vv.mask.nxv64i8.i32( , , , @@ -308,7 +308,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv64i8.i32( %0, %1, %2, @@ -318,7 +318,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i16.nxv1i16( +declare @llvm.riscv.vrgather.vv.nxv1i16.i32( , , i32); @@ -331,7 +331,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vrgather.vv.nxv1i16.i32( %0, %1, i32 %2) @@ -339,7 +339,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i16.nxv1i16( +declare @llvm.riscv.vrgather.vv.mask.nxv1i16.i32( , , , @@ -353,7 +353,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1i16.i32( %0, %1, %2, @@ -363,7 +363,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i16.nxv2i16( +declare @llvm.riscv.vrgather.vv.nxv2i16.i32( , , i32); @@ -376,7 +376,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra)
entry: - %a = call @llvm.riscv.vrgather.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.nxv2i16.i32( %0, %1, i32 %2) @@ -384,7 +384,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i16.nxv2i16( +declare @llvm.riscv.vrgather.vv.mask.nxv2i16.i32( , , , @@ -398,7 +398,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2i16.i32( %0, %1, %2, @@ -408,7 +408,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i16.nxv4i16( +declare @llvm.riscv.vrgather.vv.nxv4i16.i32( , , i32); @@ -421,7 +421,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.nxv4i16.i32( %0, %1, i32 %2) @@ -429,7 +429,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i16.nxv4i16( +declare @llvm.riscv.vrgather.vv.mask.nxv4i16.i32( , , , @@ -443,7 +443,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4i16.i32( %0, %1, %2, @@ -453,7 +453,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i16.nxv8i16( +declare @llvm.riscv.vrgather.vv.nxv8i16.i32( , , i32); @@ -466,7 +466,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.nxv8i16.i32( %0, %1, i32 %2) @@ -474,7 +474,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i16.nxv8i16( +declare @llvm.riscv.vrgather.vv.mask.nxv8i16.i32( , , , @@ -488,7 +488,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8i16.i32( %0, %1, %2, @@ -498,7 +498,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i16.nxv16i16( +declare @llvm.riscv.vrgather.vv.nxv16i16.i32( , , i32); @@ -511,7 +511,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.nxv16i16.i32( %0, %1, i32 %2) @@ -519,7 +519,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i16.nxv16i16( +declare @llvm.riscv.vrgather.vv.mask.nxv16i16.i32( , , , @@ -533,7 +533,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16i16.i32( %0, %1, %2, @@ -543,7 +543,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i16.nxv32i16( +declare @llvm.riscv.vrgather.vv.nxv32i16.i32( , , i32); @@ -556,7 +556,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.nxv32i16.i32( %0, %1, i32 %2) @@ -564,7 +564,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i16.nxv32i16( +declare @llvm.riscv.vrgather.vv.mask.nxv32i16.i32( , , , @@ -580,7 +580,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv32i16.i32( %0, %1, %2, @@ -590,7 +590,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i32.nxv1i32( +declare @llvm.riscv.vrgather.vv.nxv1i32.i32( , , i32); @@ -603,7 +603,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call 
@llvm.riscv.vrgather.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.nxv1i32.i32( %0, %1, i32 %2) @@ -611,7 +611,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i32.nxv1i32( +declare @llvm.riscv.vrgather.vv.mask.nxv1i32.i32( , , , @@ -625,7 +625,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1i32.i32( %0, %1, %2, @@ -635,7 +635,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i32.nxv2i32( +declare @llvm.riscv.vrgather.vv.nxv2i32.i32( , , i32); @@ -648,7 +648,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.nxv2i32.i32( %0, %1, i32 %2) @@ -656,7 +656,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i32.nxv2i32( +declare @llvm.riscv.vrgather.vv.mask.nxv2i32.i32( , , , @@ -670,7 +670,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2i32.i32( %0, %1, %2, @@ -680,7 +680,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i32.nxv4i32( +declare @llvm.riscv.vrgather.vv.nxv4i32.i32( , , i32); @@ -693,7 +693,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.nxv4i32.i32( %0, %1, i32 %2) @@ -701,7 +701,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i32.nxv4i32( +declare @llvm.riscv.vrgather.vv.mask.nxv4i32.i32( , , , @@ -715,7 +715,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4i32.i32( %0, %1, %2, @@ -725,7 +725,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i32.nxv8i32( +declare @llvm.riscv.vrgather.vv.nxv8i32.i32( , , i32); @@ -738,7 +738,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.nxv8i32.i32( %0, %1, i32 %2) @@ -746,7 +746,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i32.nxv8i32( +declare @llvm.riscv.vrgather.vv.mask.nxv8i32.i32( , , , @@ -760,7 +760,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8i32.i32( %0, %1, %2, @@ -770,7 +770,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i32.nxv16i32( +declare @llvm.riscv.vrgather.vv.nxv16i32.i32( , , i32); @@ -783,7 +783,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.nxv16i32.i32( %0, %1, i32 %2) @@ -791,7 +791,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i32.nxv16i32( +declare @llvm.riscv.vrgather.vv.mask.nxv16i32.i32( , , , @@ -807,7 +807,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16i32.i32( %0, %1, %2, @@ -817,7 +817,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f16.nxv1i16( +declare @llvm.riscv.vrgather.vv.nxv1f16.i32( , , i32); @@ -830,7 +830,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f16.nxv1i16( + %a = call 
@llvm.riscv.vrgather.vv.nxv1f16.i32( %0, %1, i32 %2) @@ -838,7 +838,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f16.nxv1i16( +declare @llvm.riscv.vrgather.vv.mask.nxv1f16.i32( , , , @@ -852,7 +852,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f16.nxv1i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1f16.i32( %0, %1, %2, @@ -862,7 +862,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f16.nxv2i16( +declare @llvm.riscv.vrgather.vv.nxv2f16.i32( , , i32); @@ -875,7 +875,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.nxv2f16.i32( %0, %1, i32 %2) @@ -883,7 +883,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f16.nxv2i16( +declare @llvm.riscv.vrgather.vv.mask.nxv2f16.i32( , , , @@ -897,7 +897,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2f16.i32( %0, %1, %2, @@ -907,7 +907,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f16.nxv4i16( +declare @llvm.riscv.vrgather.vv.nxv4f16.i32( , , i32); @@ -920,7 +920,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.nxv4f16.i32( %0, %1, i32 %2) @@ -928,7 +928,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f16.nxv4i16( +declare @llvm.riscv.vrgather.vv.mask.nxv4f16.i32( , , , @@ -942,7 +942,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4f16.i32( %0, %1, %2, @@ -952,7 +952,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f16.nxv8i16( +declare @llvm.riscv.vrgather.vv.nxv8f16.i32( , , i32); @@ -965,7 +965,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.nxv8f16.i32( %0, %1, i32 %2) @@ -973,7 +973,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f16.nxv8i16( +declare @llvm.riscv.vrgather.vv.mask.nxv8f16.i32( , , , @@ -987,7 +987,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8f16.i32( %0, %1, %2, @@ -997,7 +997,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f16.nxv16i16( +declare @llvm.riscv.vrgather.vv.nxv16f16.i32( , , i32); @@ -1010,7 +1010,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.nxv16f16.i32( %0, %1, i32 %2) @@ -1018,7 +1018,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f16.nxv16i16( +declare @llvm.riscv.vrgather.vv.mask.nxv16f16.i32( , , , @@ -1032,7 +1032,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16f16.i32( %0, %1, %2, @@ -1042,7 +1042,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32f16.nxv32i16( +declare @llvm.riscv.vrgather.vv.nxv32f16.i32( , , i32); @@ -1055,7 +1055,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32f16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.nxv32f16.i32( %0, %1, 
i32 %2) @@ -1063,7 +1063,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32f16.nxv32i16( +declare @llvm.riscv.vrgather.vv.mask.nxv32f16.i32( , , , @@ -1079,7 +1079,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32f16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv32f16.i32( %0, %1, %2, @@ -1089,7 +1089,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f32.nxv1i32( +declare @llvm.riscv.vrgather.vv.nxv1f32.i32( , , i32); @@ -1102,7 +1102,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.nxv1f32.i32( %0, %1, i32 %2) @@ -1110,7 +1110,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f32.nxv1i32( +declare @llvm.riscv.vrgather.vv.mask.nxv1f32.i32( , , , @@ -1124,7 +1124,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1f32.i32( %0, %1, %2, @@ -1134,7 +1134,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f32.nxv2i32( +declare @llvm.riscv.vrgather.vv.nxv2f32.i32( , , i32); @@ -1147,7 +1147,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.nxv2f32.i32( %0, %1, i32 %2) @@ -1155,7 +1155,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f32.nxv2i32( +declare @llvm.riscv.vrgather.vv.mask.nxv2f32.i32( , , , @@ -1169,7 +1169,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2f32.i32( %0, %1, %2, @@ -1179,7 +1179,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f32.nxv4i32( +declare @llvm.riscv.vrgather.vv.nxv4f32.i32( , , i32); @@ -1192,7 +1192,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.nxv4f32.i32( %0, %1, i32 %2) @@ -1200,7 +1200,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f32.nxv4i32( +declare @llvm.riscv.vrgather.vv.mask.nxv4f32.i32( , , , @@ -1214,7 +1214,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4f32.i32( %0, %1, %2, @@ -1224,7 +1224,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f32.nxv8i32( +declare @llvm.riscv.vrgather.vv.nxv8f32.i32( , , i32); @@ -1237,7 +1237,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.nxv8f32.i32( %0, %1, i32 %2) @@ -1245,7 +1245,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f32.nxv8i32( +declare @llvm.riscv.vrgather.vv.mask.nxv8f32.i32( , , , @@ -1259,7 +1259,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8f32.i32( %0, %1, %2, @@ -1269,7 +1269,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f32.nxv16i32( +declare @llvm.riscv.vrgather.vv.nxv16f32.i32( , , i32); @@ -1282,7 +1282,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.nxv16f32.i32( %0, %1, i32 %2) @@ -1290,7 
+1290,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f32.nxv16i32( +declare @llvm.riscv.vrgather.vv.mask.nxv16f32.i32( , , , @@ -1306,7 +1306,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16f32.i32( %0, %1, %2, @@ -1316,7 +1316,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f64.nxv1i64( +declare @llvm.riscv.vrgather.vv.nxv1f64.i32( , , i32); @@ -1329,7 +1329,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f64.nxv1i64( + %a = call @llvm.riscv.vrgather.vv.nxv1f64.i32( %0, %1, i32 %2) @@ -1337,7 +1337,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f64.nxv1i64( +declare @llvm.riscv.vrgather.vv.mask.nxv1f64.i32( , , , @@ -1351,7 +1351,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f64.nxv1i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1f64.i32( %0, %1, %2, @@ -1361,7 +1361,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f64.nxv2i64( +declare @llvm.riscv.vrgather.vv.nxv2f64.i32( , , i32); @@ -1374,7 +1374,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f64.nxv2i64( + %a = call @llvm.riscv.vrgather.vv.nxv2f64.i32( %0, %1, i32 %2) @@ -1382,7 +1382,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f64.nxv2i64( +declare @llvm.riscv.vrgather.vv.mask.nxv2f64.i32( , , , @@ -1396,7 +1396,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f64.nxv2i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2f64.i32( %0, %1, %2, @@ -1406,7 +1406,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f64.nxv4i64( +declare @llvm.riscv.vrgather.vv.nxv4f64.i32( , , i32); @@ -1419,7 +1419,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f64.nxv4i64( + %a = call @llvm.riscv.vrgather.vv.nxv4f64.i32( %0, %1, i32 %2) @@ -1427,7 +1427,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f64.nxv4i64( +declare @llvm.riscv.vrgather.vv.mask.nxv4f64.i32( , , , @@ -1441,7 +1441,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f64.nxv4i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4f64.i32( %0, %1, %2, @@ -1451,7 +1451,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f64.nxv8i64( +declare @llvm.riscv.vrgather.vv.nxv8f64.i32( , , i32); @@ -1464,7 +1464,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f64.nxv8i64( + %a = call @llvm.riscv.vrgather.vv.nxv8f64.i32( %0, %1, i32 %2) @@ -1472,7 +1472,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f64.nxv8i64( +declare @llvm.riscv.vrgather.vv.mask.nxv8f64.i32( , , , @@ -1488,7 +1488,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f64.nxv8i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8f64.i32( %0, %1, %2, @@ -1498,7 +1498,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i8.i32( +declare @llvm.riscv.vrgather.vx.nxv1i8.i32( , i32, i32); @@ -1511,7 +1511,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1i8.i32( %0, i32 %1, i32 %2) @@ -1519,7 +1519,7 @@ ret %a } -declare 
@llvm.riscv.vrgather.mask.nxv1i8.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv1i8.i32( , , i32, @@ -1533,7 +1533,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i8.i32( %0, %1, i32 %2, @@ -1543,7 +1543,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i8.i32( +declare @llvm.riscv.vrgather.vx.nxv2i8.i32( , i32, i32); @@ -1556,7 +1556,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2i8.i32( %0, i32 %1, i32 %2) @@ -1564,7 +1564,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i8.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv2i8.i32( , , i32, @@ -1578,7 +1578,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i8.i32( %0, %1, i32 %2, @@ -1588,7 +1588,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i8.i32( +declare @llvm.riscv.vrgather.vx.nxv4i8.i32( , i32, i32); @@ -1601,7 +1601,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4i8.i32( %0, i32 %1, i32 %2) @@ -1609,7 +1609,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i8.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv4i8.i32( , , i32, @@ -1623,7 +1623,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i8.i32( %0, %1, i32 %2, @@ -1633,7 +1633,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i8.i32( +declare @llvm.riscv.vrgather.vx.nxv8i8.i32( , i32, i32); @@ -1646,7 +1646,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8i8.i32( %0, i32 %1, i32 %2) @@ -1654,7 +1654,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i8.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv8i8.i32( , , i32, @@ -1668,7 +1668,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i8.i32( %0, %1, i32 %2, @@ -1678,7 +1678,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i8.i32( +declare @llvm.riscv.vrgather.vx.nxv16i8.i32( , i32, i32); @@ -1691,7 +1691,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16i8.i32( %0, i32 %1, i32 %2) @@ -1699,7 +1699,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i8.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv16i8.i32( , , i32, @@ -1713,7 +1713,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i8.i32( %0, %1, i32 %2, @@ -1723,7 +1723,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i8.i32( +declare @llvm.riscv.vrgather.vx.nxv32i8.i32( , i32, i32); @@ -1736,7 +1736,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv32i8.i32( %0, i32 %1, i32 %2) @@ -1744,7 +1744,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i8.i32( +declare 
@llvm.riscv.vrgather.vx.mask.nxv32i8.i32( , , i32, @@ -1758,7 +1758,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32i8.i32( %0, %1, i32 %2, @@ -1768,7 +1768,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv64i8.i32( +declare @llvm.riscv.vrgather.vx.nxv64i8.i32( , i32, i32); @@ -1781,7 +1781,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv64i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv64i8.i32( %0, i32 %1, i32 %2) @@ -1789,7 +1789,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv64i8.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv64i8.i32( , , i32, @@ -1803,7 +1803,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv64i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv64i8.i32( %0, %1, i32 %2, @@ -1813,7 +1813,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i16.i32( +declare @llvm.riscv.vrgather.vx.nxv1i16.i32( , i32, i32); @@ -1826,7 +1826,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1i16.i32( %0, i32 %1, i32 %2) @@ -1834,7 +1834,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv1i16.i32( , , i32, @@ -1848,7 +1848,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i16.i32( %0, %1, i32 %2, @@ -1858,7 +1858,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i16.i32( +declare @llvm.riscv.vrgather.vx.nxv2i16.i32( , i32, i32); @@ -1871,7 +1871,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2i16.i32( %0, i32 %1, i32 %2) @@ -1879,7 +1879,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv2i16.i32( , , i32, @@ -1893,7 +1893,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i16.i32( %0, %1, i32 %2, @@ -1903,7 +1903,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i16.i32( +declare @llvm.riscv.vrgather.vx.nxv4i16.i32( , i32, i32); @@ -1916,7 +1916,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4i16.i32( %0, i32 %1, i32 %2) @@ -1924,7 +1924,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv4i16.i32( , , i32, @@ -1938,7 +1938,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i16.i32( %0, %1, i32 %2, @@ -1948,7 +1948,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i16.i32( +declare @llvm.riscv.vrgather.vx.nxv8i16.i32( , i32, i32); @@ -1961,7 +1961,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8i16.i32( %0, i32 %1, i32 %2) @@ -1969,7 +1969,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv8i16.i32( , , 
i32, @@ -1983,7 +1983,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i16.i32( %0, %1, i32 %2, @@ -1993,7 +1993,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i16.i32( +declare @llvm.riscv.vrgather.vx.nxv16i16.i32( , i32, i32); @@ -2006,7 +2006,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16i16.i32( %0, i32 %1, i32 %2) @@ -2014,7 +2014,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv16i16.i32( , , i32, @@ -2028,7 +2028,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i16.i32( %0, %1, i32 %2, @@ -2038,7 +2038,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i16.i32( +declare @llvm.riscv.vrgather.vx.nxv32i16.i32( , i32, i32); @@ -2051,7 +2051,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv32i16.i32( %0, i32 %1, i32 %2) @@ -2059,7 +2059,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv32i16.i32( , , i32, @@ -2073,7 +2073,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32i16.i32( %0, %1, i32 %2, @@ -2083,7 +2083,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i32.i32( +declare @llvm.riscv.vrgather.vx.nxv1i32.i32( , i32, i32); @@ -2096,7 +2096,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1i32.i32( %0, i32 %1, i32 %2) @@ -2104,7 +2104,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv1i32.i32( , , i32, @@ -2118,7 +2118,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i32.i32( %0, %1, i32 %2, @@ -2128,7 +2128,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i32.i32( +declare @llvm.riscv.vrgather.vx.nxv2i32.i32( , i32, i32); @@ -2141,7 +2141,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2i32.i32( %0, i32 %1, i32 %2) @@ -2149,7 +2149,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv2i32.i32( , , i32, @@ -2163,7 +2163,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i32.i32( %0, %1, i32 %2, @@ -2173,7 +2173,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i32.i32( +declare @llvm.riscv.vrgather.vx.nxv4i32.i32( , i32, i32); @@ -2186,7 +2186,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4i32.i32( %0, i32 %1, i32 %2) @@ -2194,7 +2194,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv4i32.i32( , , i32, @@ -2208,7 +2208,7 @@ ; 
CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i32.i32( %0, %1, i32 %2, @@ -2218,7 +2218,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i32.i32( +declare @llvm.riscv.vrgather.vx.nxv8i32.i32( , i32, i32); @@ -2231,7 +2231,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8i32.i32( %0, i32 %1, i32 %2) @@ -2239,7 +2239,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv8i32.i32( , , i32, @@ -2253,7 +2253,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i32.i32( %0, %1, i32 %2, @@ -2263,7 +2263,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i32.i32( +declare @llvm.riscv.vrgather.vx.nxv16i32.i32( , i32, i32); @@ -2276,7 +2276,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16i32.i32( %0, i32 %1, i32 %2) @@ -2284,7 +2284,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv16i32.i32( , , i32, @@ -2298,7 +2298,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i32.i32( %0, %1, i32 %2, @@ -2308,7 +2308,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f16.i32( +declare @llvm.riscv.vrgather.vx.nxv1f16.i32( , i32, i32); @@ -2321,7 +2321,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1f16.i32( %0, i32 %1, i32 %2) @@ -2329,7 +2329,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv1f16.i32( , , i32, @@ -2343,7 +2343,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f16.i32( %0, %1, i32 %2, @@ -2353,7 +2353,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f16.i32( +declare @llvm.riscv.vrgather.vx.nxv2f16.i32( , i32, i32); @@ -2366,7 +2366,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2f16.i32( %0, i32 %1, i32 %2) @@ -2374,7 +2374,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv2f16.i32( , , i32, @@ -2388,7 +2388,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f16.i32( %0, %1, i32 %2, @@ -2398,7 +2398,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f16.i32( +declare @llvm.riscv.vrgather.vx.nxv4f16.i32( , i32, i32); @@ -2411,7 +2411,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4f16.i32( %0, i32 %1, i32 %2) @@ -2419,7 +2419,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv4f16.i32( , , i32, @@ -2433,7 +2433,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, 
v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f16.i32( %0, %1, i32 %2, @@ -2443,7 +2443,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f16.i32( +declare @llvm.riscv.vrgather.vx.nxv8f16.i32( , i32, i32); @@ -2456,7 +2456,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8f16.i32( %0, i32 %1, i32 %2) @@ -2464,7 +2464,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv8f16.i32( , , i32, @@ -2478,7 +2478,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f16.i32( %0, %1, i32 %2, @@ -2488,7 +2488,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f16.i32( +declare @llvm.riscv.vrgather.vx.nxv16f16.i32( , i32, i32); @@ -2501,7 +2501,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16f16.i32( %0, i32 %1, i32 %2) @@ -2509,7 +2509,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv16f16.i32( , , i32, @@ -2523,7 +2523,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16f16.i32( %0, %1, i32 %2, @@ -2533,7 +2533,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32f16.i32( +declare @llvm.riscv.vrgather.vx.nxv32f16.i32( , i32, i32); @@ -2546,7 +2546,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv32f16.i32( %0, i32 %1, i32 %2) @@ -2554,7 +2554,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32f16.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv32f16.i32( , , i32, @@ -2568,7 +2568,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32f16.i32( %0, %1, i32 %2, @@ -2578,7 +2578,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f32.i32( +declare @llvm.riscv.vrgather.vx.nxv1f32.i32( , i32, i32); @@ -2591,7 +2591,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1f32.i32( %0, i32 %1, i32 %2) @@ -2599,7 +2599,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv1f32.i32( , , i32, @@ -2613,7 +2613,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f32.i32( %0, %1, i32 %2, @@ -2623,7 +2623,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f32.i32( +declare @llvm.riscv.vrgather.vx.nxv2f32.i32( , i32, i32); @@ -2636,7 +2636,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2f32.i32( %0, i32 %1, i32 %2) @@ -2644,7 +2644,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv2f32.i32( , , i32, @@ -2658,7 +2658,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 
0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f32.i32( %0, %1, i32 %2, @@ -2668,7 +2668,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f32.i32( +declare @llvm.riscv.vrgather.vx.nxv4f32.i32( , i32, i32); @@ -2681,7 +2681,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4f32.i32( %0, i32 %1, i32 %2) @@ -2689,7 +2689,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv4f32.i32( , , i32, @@ -2703,7 +2703,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f32.i32( %0, %1, i32 %2, @@ -2713,7 +2713,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f32.i32( +declare @llvm.riscv.vrgather.vx.nxv8f32.i32( , i32, i32); @@ -2726,7 +2726,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8f32.i32( %0, i32 %1, i32 %2) @@ -2734,7 +2734,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv8f32.i32( , , i32, @@ -2748,7 +2748,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f32.i32( %0, %1, i32 %2, @@ -2758,7 +2758,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f32.i32( +declare @llvm.riscv.vrgather.vx.nxv16f32.i32( , i32, i32); @@ -2771,7 +2771,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16f32.i32( %0, i32 %1, i32 %2) @@ -2779,7 +2779,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f32.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv16f32.i32( , , i32, @@ -2793,7 +2793,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16f32.i32( %0, %1, i32 %2, @@ -2803,7 +2803,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f64.i32( +declare @llvm.riscv.vrgather.vx.nxv1f64.i32( , i32, i32); @@ -2816,7 +2816,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1f64.i32( %0, i32 %1, i32 %2) @@ -2824,7 +2824,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f64.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv1f64.i32( , , i32, @@ -2838,7 +2838,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f64.i32( %0, %1, i32 %2, @@ -2848,7 +2848,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f64.i32( +declare @llvm.riscv.vrgather.vx.nxv2f64.i32( , i32, i32); @@ -2861,7 +2861,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2f64.i32( %0, i32 %1, i32 %2) @@ -2869,7 +2869,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f64.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv2f64.i32( , , i32, @@ -2883,7 +2883,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call 
@llvm.riscv.vrgather.mask.nxv2f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f64.i32( %0, %1, i32 %2, @@ -2893,7 +2893,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f64.i32( +declare @llvm.riscv.vrgather.vx.nxv4f64.i32( , i32, i32); @@ -2906,7 +2906,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4f64.i32( %0, i32 %1, i32 %2) @@ -2914,7 +2914,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f64.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv4f64.i32( , , i32, @@ -2928,7 +2928,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f64.i32( %0, %1, i32 %2, @@ -2938,7 +2938,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f64.i32( +declare @llvm.riscv.vrgather.vx.nxv8f64.i32( , i32, i32); @@ -2951,7 +2951,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8f64.i32( %0, i32 %1, i32 %2) @@ -2959,7 +2959,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f64.i32( +declare @llvm.riscv.vrgather.vx.mask.nxv8f64.i32( , , i32, @@ -2973,7 +2973,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f64.i32( %0, %1, i32 %2, @@ -2991,7 +2991,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1i8.i32( %0, i32 9, i32 %1) @@ -3006,7 +3006,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i8.i32( %0, %1, i32 9, @@ -3024,7 +3024,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2i8.i32( %0, i32 9, i32 %1) @@ -3039,7 +3039,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i8.i32( %0, %1, i32 9, @@ -3057,7 +3057,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4i8.i32( %0, i32 9, i32 %1) @@ -3072,7 +3072,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i8.i32( %0, %1, i32 9, @@ -3090,7 +3090,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8i8.i32( %0, i32 9, i32 %1) @@ -3105,7 +3105,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i8.i32( %0, %1, i32 9, @@ -3123,7 +3123,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16i8.i32( %0, i32 9, i32 %1) @@ -3138,7 +3138,7 @@ ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call 
@llvm.riscv.vrgather.mask.nxv16i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i8.i32( %0, %1, i32 9, @@ -3156,7 +3156,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv32i8.i32( %0, i32 9, i32 %1) @@ -3171,7 +3171,7 @@ ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32i8.i32( %0, %1, i32 9, @@ -3189,7 +3189,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv64i8.i32( + %a = call @llvm.riscv.vrgather.vx.nxv64i8.i32( %0, i32 9, i32 %1) @@ -3204,7 +3204,7 @@ ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv64i8.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv64i8.i32( %0, %1, i32 9, @@ -3222,7 +3222,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1i16.i32( %0, i32 9, i32 %1) @@ -3237,7 +3237,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i16.i32( %0, %1, i32 9, @@ -3255,7 +3255,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2i16.i32( %0, i32 9, i32 %1) @@ -3270,7 +3270,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i16.i32( %0, %1, i32 9, @@ -3288,7 +3288,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4i16.i32( %0, i32 9, i32 %1) @@ -3303,7 +3303,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i16.i32( %0, %1, i32 9, @@ -3321,7 +3321,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8i16.i32( %0, i32 9, i32 %1) @@ -3336,7 +3336,7 @@ ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i16.i32( %0, %1, i32 9, @@ -3354,7 +3354,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16i16.i32( %0, i32 9, i32 %1) @@ -3369,7 +3369,7 @@ ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i16.i32( %0, %1, i32 9, @@ -3387,7 +3387,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv32i16.i32( %0, i32 9, i32 %1) @@ -3402,7 +3402,7 @@ ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32i16.i32( %0, %1, i32 9, @@ -3420,7 +3420,7 @@ ; 
CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1i32.i32( %0, i32 9, i32 %1) @@ -3435,7 +3435,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i32.i32( %0, %1, i32 9, @@ -3453,7 +3453,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2i32.i32( %0, i32 9, i32 %1) @@ -3468,7 +3468,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i32.i32( %0, %1, i32 9, @@ -3486,7 +3486,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4i32.i32( %0, i32 9, i32 %1) @@ -3501,7 +3501,7 @@ ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i32.i32( %0, %1, i32 9, @@ -3519,7 +3519,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8i32.i32( %0, i32 9, i32 %1) @@ -3534,7 +3534,7 @@ ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i32.i32( %0, %1, i32 9, @@ -3552,7 +3552,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16i32.i32( %0, i32 9, i32 %1) @@ -3567,7 +3567,7 @@ ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i32.i32( %0, %1, i32 9, @@ -3585,7 +3585,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1f16.i32( %0, i32 9, i32 %1) @@ -3600,7 +3600,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f16.i32( %0, %1, i32 9, @@ -3618,7 +3618,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2f16.i32( %0, i32 9, i32 %1) @@ -3633,7 +3633,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f16.i32( %0, %1, i32 9, @@ -3651,7 +3651,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4f16.i32( %0, i32 9, i32 %1) @@ -3666,7 +3666,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f16.i32( %0, %1, i32 9, @@ -3684,7 +3684,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f16.i32( + %a = call 
@llvm.riscv.vrgather.vx.nxv8f16.i32( %0, i32 9, i32 %1) @@ -3699,7 +3699,7 @@ ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f16.i32( %0, %1, i32 9, @@ -3717,7 +3717,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16f16.i32( %0, i32 9, i32 %1) @@ -3732,7 +3732,7 @@ ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16f16.i32( %0, %1, i32 9, @@ -3750,7 +3750,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32f16.i32( + %a = call @llvm.riscv.vrgather.vx.nxv32f16.i32( %0, i32 9, i32 %1) @@ -3765,7 +3765,7 @@ ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32f16.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32f16.i32( %0, %1, i32 9, @@ -3783,7 +3783,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1f32.i32( %0, i32 9, i32 %1) @@ -3798,7 +3798,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f32.i32( %0, %1, i32 9, @@ -3816,7 +3816,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2f32.i32( %0, i32 9, i32 %1) @@ -3831,7 +3831,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f32.i32( %0, %1, i32 9, @@ -3849,7 +3849,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4f32.i32( %0, i32 9, i32 %1) @@ -3864,7 +3864,7 @@ ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f32.i32( %0, %1, i32 9, @@ -3882,7 +3882,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8f32.i32( %0, i32 9, i32 %1) @@ -3897,7 +3897,7 @@ ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f32.i32( %0, %1, i32 9, @@ -3915,7 +3915,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f32.i32( + %a = call @llvm.riscv.vrgather.vx.nxv16f32.i32( %0, i32 9, i32 %1) @@ -3930,7 +3930,7 @@ ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f32.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16f32.i32( %0, %1, i32 9, @@ -3948,7 +3948,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv1f64.i32( %0, i32 9, i32 %1) @@ -3963,7 +3963,7 @@ ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t ; 
CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f64.i32( %0, %1, i32 9, @@ -3981,7 +3981,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv2f64.i32( %0, i32 9, i32 %1) @@ -3996,7 +3996,7 @@ ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f64.i32( %0, %1, i32 9, @@ -4014,7 +4014,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv4f64.i32( %0, i32 9, i32 %1) @@ -4029,7 +4029,7 @@ ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f64.i32( %0, %1, i32 9, @@ -4047,7 +4047,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f64.i32( + %a = call @llvm.riscv.vrgather.vx.nxv8f64.i32( %0, i32 9, i32 %1) @@ -4062,7 +4062,7 @@ ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f64.i32( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f64.i32( %0, %1, i32 9, diff --git a/llvm/test/CodeGen/RISCV/rvv/vrgather-rv64.ll b/llvm/test/CodeGen/RISCV/rvv/vrgather-rv64.ll --- a/llvm/test/CodeGen/RISCV/rvv/vrgather-rv64.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vrgather-rv64.ll @@ -1,7 +1,7 @@ ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ; RUN: llc -mtriple=riscv64 -mattr=+experimental-v,+d,+experimental-zfh -verify-machineinstrs \ ; RUN: --riscv-no-aliases < %s | FileCheck %s -declare @llvm.riscv.vrgather.nxv1i8.nxv1i8( +declare @llvm.riscv.vrgather.vv.nxv1i8.i64( , , i64); @@ -14,7 +14,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vrgather.vv.nxv1i8.i64( %0, %1, i64 %2) @@ -22,7 +22,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vrgather.vv.mask.nxv1i8.i64( , , , @@ -36,7 +36,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1i8.i64( %0, %1, %2, @@ -46,7 +46,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i8.nxv2i8( +declare @llvm.riscv.vrgather.vv.nxv2i8.i64( , , i64); @@ -59,7 +59,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vrgather.vv.nxv2i8.i64( %0, %1, i64 %2) @@ -67,7 +67,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i8.nxv2i8( +declare @llvm.riscv.vrgather.vv.mask.nxv2i8.i64( , , , @@ -81,7 +81,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2i8.i64( %0, %1, %2, @@ -91,7 +91,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i8.nxv4i8( +declare @llvm.riscv.vrgather.vv.nxv4i8.i64( , , i64); @@ -104,7 +104,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vrgather.vv.nxv4i8.i64( %0, %1, i64 %2) @@ -112,7 +112,7 @@ 
ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i8.nxv4i8( +declare @llvm.riscv.vrgather.vv.mask.nxv4i8.i64( , , , @@ -126,7 +126,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4i8.i64( %0, %1, %2, @@ -136,7 +136,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i8.nxv8i8( +declare @llvm.riscv.vrgather.vv.nxv8i8.i64( , , i64); @@ -149,7 +149,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vrgather.vv.nxv8i8.i64( %0, %1, i64 %2) @@ -157,7 +157,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i8.nxv8i8( +declare @llvm.riscv.vrgather.vv.mask.nxv8i8.i64( , , , @@ -171,7 +171,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8i8.i64( %0, %1, %2, @@ -181,7 +181,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i8.nxv16i8( +declare @llvm.riscv.vrgather.vv.nxv16i8.i64( , , i64); @@ -194,7 +194,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vrgather.vv.nxv16i8.i64( %0, %1, i64 %2) @@ -202,7 +202,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i8.nxv16i8( +declare @llvm.riscv.vrgather.vv.mask.nxv16i8.i64( , , , @@ -216,7 +216,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16i8.i64( %0, %1, %2, @@ -226,7 +226,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i8.nxv32i8( +declare @llvm.riscv.vrgather.vv.nxv32i8.i64( , , i64); @@ -239,7 +239,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vrgather.vv.nxv32i8.i64( %0, %1, i64 %2) @@ -247,7 +247,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i8.nxv32i8( +declare @llvm.riscv.vrgather.vv.mask.nxv32i8.i64( , , , @@ -261,7 +261,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv32i8.i64( %0, %1, %2, @@ -271,7 +271,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv64i8.nxv64i8( +declare @llvm.riscv.vrgather.vv.nxv64i8.i64( , , i64); @@ -284,7 +284,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vrgather.vv.nxv64i8.i64( %0, %1, i64 %2) @@ -292,7 +292,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv64i8.nxv64i8( +declare @llvm.riscv.vrgather.vv.mask.nxv64i8.i64( , , , @@ -308,7 +308,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vrgather.vv.mask.nxv64i8.i64( %0, %1, %2, @@ -318,7 +318,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i16.nxv1i16( +declare @llvm.riscv.vrgather.vv.nxv1i16.i64( , , i64); @@ -331,7 +331,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vrgather.vv.nxv1i16.i64( %0, %1, i64 %2) @@ -339,7 +339,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i16.nxv1i16( +declare 
@llvm.riscv.vrgather.vv.mask.nxv1i16.i64( , , , @@ -353,7 +353,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1i16.i64( %0, %1, %2, @@ -363,7 +363,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i16.nxv2i16( +declare @llvm.riscv.vrgather.vv.nxv2i16.i64( , , i64); @@ -376,7 +376,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.nxv2i16.i64( %0, %1, i64 %2) @@ -384,7 +384,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i16.nxv2i16( +declare @llvm.riscv.vrgather.vv.mask.nxv2i16.i64( , , , @@ -398,7 +398,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2i16.i64( %0, %1, %2, @@ -408,7 +408,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i16.nxv4i16( +declare @llvm.riscv.vrgather.vv.nxv4i16.i64( , , i64); @@ -421,7 +421,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.nxv4i16.i64( %0, %1, i64 %2) @@ -429,7 +429,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i16.nxv4i16( +declare @llvm.riscv.vrgather.vv.mask.nxv4i16.i64( , , , @@ -443,7 +443,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4i16.i64( %0, %1, %2, @@ -453,7 +453,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i16.nxv8i16( +declare @llvm.riscv.vrgather.vv.nxv8i16.i64( , , i64); @@ -466,7 +466,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.nxv8i16.i64( %0, %1, i64 %2) @@ -474,7 +474,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i16.nxv8i16( +declare @llvm.riscv.vrgather.vv.mask.nxv8i16.i64( , , , @@ -488,7 +488,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8i16.i64( %0, %1, %2, @@ -498,7 +498,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i16.nxv16i16( +declare @llvm.riscv.vrgather.vv.nxv16i16.i64( , , i64); @@ -511,7 +511,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.nxv16i16.i64( %0, %1, i64 %2) @@ -519,7 +519,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i16.nxv16i16( +declare @llvm.riscv.vrgather.vv.mask.nxv16i16.i64( , , , @@ -533,7 +533,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16i16.i64( %0, %1, %2, @@ -543,7 +543,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i16.nxv32i16( +declare @llvm.riscv.vrgather.vv.nxv32i16.i64( , , i64); @@ -556,7 +556,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.nxv32i16.i64( %0, %1, i64 %2) @@ -564,7 +564,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i16.nxv32i16( +declare @llvm.riscv.vrgather.vv.mask.nxv32i16.i64( , , , @@ 
-580,7 +580,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv32i16.i64( %0, %1, %2, @@ -590,7 +590,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i32.nxv1i32( +declare @llvm.riscv.vrgather.vv.nxv1i32.i64( , , i64); @@ -603,7 +603,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.nxv1i32.i64( %0, %1, i64 %2) @@ -611,7 +611,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i32.nxv1i32( +declare @llvm.riscv.vrgather.vv.mask.nxv1i32.i64( , , , @@ -625,7 +625,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1i32.i64( %0, %1, %2, @@ -635,7 +635,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i32.nxv2i32( +declare @llvm.riscv.vrgather.vv.nxv2i32.i64( , , i64); @@ -648,7 +648,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.nxv2i32.i64( %0, %1, i64 %2) @@ -656,7 +656,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i32.nxv2i32( +declare @llvm.riscv.vrgather.vv.mask.nxv2i32.i64( , , , @@ -670,7 +670,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2i32.i64( %0, %1, %2, @@ -680,7 +680,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i32.nxv4i32( +declare @llvm.riscv.vrgather.vv.nxv4i32.i64( , , i64); @@ -693,7 +693,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.nxv4i32.i64( %0, %1, i64 %2) @@ -701,7 +701,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i32.nxv4i32( +declare @llvm.riscv.vrgather.vv.mask.nxv4i32.i64( , , , @@ -715,7 +715,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4i32.i64( %0, %1, %2, @@ -725,7 +725,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i32.nxv8i32( +declare @llvm.riscv.vrgather.vv.nxv8i32.i64( , , i64); @@ -738,7 +738,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.nxv8i32.i64( %0, %1, i64 %2) @@ -746,7 +746,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i32.nxv8i32( +declare @llvm.riscv.vrgather.vv.mask.nxv8i32.i64( , , , @@ -760,7 +760,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8i32.i64( %0, %1, %2, @@ -770,7 +770,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i32.nxv16i32( +declare @llvm.riscv.vrgather.vv.nxv16i32.i64( , , i64); @@ -783,7 +783,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.nxv16i32.i64( %0, %1, i64 %2) @@ -791,7 +791,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i32.nxv16i32( +declare @llvm.riscv.vrgather.vv.mask.nxv16i32.i64( , , , @@ -807,7 +807,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, 
v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16i32.i64( %0, %1, %2, @@ -817,7 +817,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i64.nxv1i64( +declare @llvm.riscv.vrgather.vv.nxv1i64.i64( , , i64); @@ -830,7 +830,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vrgather.vv.nxv1i64.i64( %0, %1, i64 %2) @@ -838,7 +838,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i64.nxv1i64( +declare @llvm.riscv.vrgather.vv.mask.nxv1i64.i64( , , , @@ -852,7 +852,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1i64.i64( %0, %1, %2, @@ -862,7 +862,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i64.nxv2i64( +declare @llvm.riscv.vrgather.vv.nxv2i64.i64( , , i64); @@ -875,7 +875,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vrgather.vv.nxv2i64.i64( %0, %1, i64 %2) @@ -883,7 +883,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i64.nxv2i64( +declare @llvm.riscv.vrgather.vv.mask.nxv2i64.i64( , , , @@ -897,7 +897,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2i64.i64( %0, %1, %2, @@ -907,7 +907,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i64.nxv4i64( +declare @llvm.riscv.vrgather.vv.nxv4i64.i64( , , i64); @@ -920,7 +920,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vrgather.vv.nxv4i64.i64( %0, %1, i64 %2) @@ -928,7 +928,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i64.nxv4i64( +declare @llvm.riscv.vrgather.vv.mask.nxv4i64.i64( , , , @@ -942,7 +942,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4i64.i64( %0, %1, %2, @@ -952,7 +952,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i64.nxv8i64( +declare @llvm.riscv.vrgather.vv.nxv8i64.i64( , , i64); @@ -965,7 +965,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vrgather.vv.nxv8i64.i64( %0, %1, i64 %2) @@ -973,7 +973,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i64.nxv8i64( +declare @llvm.riscv.vrgather.vv.mask.nxv8i64.i64( , , , @@ -989,7 +989,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8i64.i64( %0, %1, %2, @@ -999,7 +999,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f16.nxv1i16( +declare @llvm.riscv.vrgather.vv.nxv1f16.i64( , , i64); @@ -1012,7 +1012,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f16.nxv1i16( + %a = call @llvm.riscv.vrgather.vv.nxv1f16.i64( %0, %1, i64 %2) @@ -1020,7 +1020,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f16.nxv1i16( +declare @llvm.riscv.vrgather.vv.mask.nxv1f16.i64( , , , @@ -1034,7 +1034,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call 
@llvm.riscv.vrgather.mask.nxv1f16.nxv1i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1f16.i64( %0, %1, %2, @@ -1044,7 +1044,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f16.nxv2i16( +declare @llvm.riscv.vrgather.vv.nxv2f16.i64( , , i64); @@ -1057,7 +1057,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.nxv2f16.i64( %0, %1, i64 %2) @@ -1065,7 +1065,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f16.nxv2i16( +declare @llvm.riscv.vrgather.vv.mask.nxv2f16.i64( , , , @@ -1079,7 +1079,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f16.nxv2i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2f16.i64( %0, %1, %2, @@ -1089,7 +1089,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f16.nxv4i16( +declare @llvm.riscv.vrgather.vv.nxv4f16.i64( , , i64); @@ -1102,7 +1102,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.nxv4f16.i64( %0, %1, i64 %2) @@ -1110,7 +1110,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f16.nxv4i16( +declare @llvm.riscv.vrgather.vv.mask.nxv4f16.i64( , , , @@ -1124,7 +1124,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f16.nxv4i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4f16.i64( %0, %1, %2, @@ -1134,7 +1134,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f16.nxv8i16( +declare @llvm.riscv.vrgather.vv.nxv8f16.i64( , , i64); @@ -1147,7 +1147,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.nxv8f16.i64( %0, %1, i64 %2) @@ -1155,7 +1155,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f16.nxv8i16( +declare @llvm.riscv.vrgather.vv.mask.nxv8f16.i64( , , , @@ -1169,7 +1169,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f16.nxv8i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8f16.i64( %0, %1, %2, @@ -1179,7 +1179,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f16.nxv16i16( +declare @llvm.riscv.vrgather.vv.nxv16f16.i64( , , i64); @@ -1192,7 +1192,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.nxv16f16.i64( %0, %1, i64 %2) @@ -1200,7 +1200,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f16.nxv16i16( +declare @llvm.riscv.vrgather.vv.mask.nxv16f16.i64( , , , @@ -1214,7 +1214,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f16.nxv16i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16f16.i64( %0, %1, %2, @@ -1224,7 +1224,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32f16.nxv32i16( +declare @llvm.riscv.vrgather.vv.nxv32f16.i64( , , i64); @@ -1237,7 +1237,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32f16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.nxv32f16.i64( %0, %1, i64 %2) @@ -1245,7 +1245,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32f16.nxv32i16( +declare @llvm.riscv.vrgather.vv.mask.nxv32f16.i64( , , , @@ -1261,7 +1261,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call 
@llvm.riscv.vrgather.mask.nxv32f16.nxv32i16( + %a = call @llvm.riscv.vrgather.vv.mask.nxv32f16.i64( %0, %1, %2, @@ -1271,7 +1271,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f32.nxv1i32( +declare @llvm.riscv.vrgather.vv.nxv1f32.i64( , , i64); @@ -1284,7 +1284,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.nxv1f32.i64( %0, %1, i64 %2) @@ -1292,7 +1292,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f32.nxv1i32( +declare @llvm.riscv.vrgather.vv.mask.nxv1f32.i64( , , , @@ -1306,7 +1306,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f32.nxv1i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1f32.i64( %0, %1, %2, @@ -1316,7 +1316,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f32.nxv2i32( +declare @llvm.riscv.vrgather.vv.nxv2f32.i64( , , i64); @@ -1329,7 +1329,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.nxv2f32.i64( %0, %1, i64 %2) @@ -1337,7 +1337,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f32.nxv2i32( +declare @llvm.riscv.vrgather.vv.mask.nxv2f32.i64( , , , @@ -1351,7 +1351,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f32.nxv2i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2f32.i64( %0, %1, %2, @@ -1361,7 +1361,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f32.nxv4i32( +declare @llvm.riscv.vrgather.vv.nxv4f32.i64( , , i64); @@ -1374,7 +1374,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.nxv4f32.i64( %0, %1, i64 %2) @@ -1382,7 +1382,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f32.nxv4i32( +declare @llvm.riscv.vrgather.vv.mask.nxv4f32.i64( , , , @@ -1396,7 +1396,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f32.nxv4i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4f32.i64( %0, %1, %2, @@ -1406,7 +1406,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f32.nxv8i32( +declare @llvm.riscv.vrgather.vv.nxv8f32.i64( , , i64); @@ -1419,7 +1419,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.nxv8f32.i64( %0, %1, i64 %2) @@ -1427,7 +1427,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f32.nxv8i32( +declare @llvm.riscv.vrgather.vv.mask.nxv8f32.i64( , , , @@ -1441,7 +1441,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f32.nxv8i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8f32.i64( %0, %1, %2, @@ -1451,7 +1451,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f32.nxv16i32( +declare @llvm.riscv.vrgather.vv.nxv16f32.i64( , , i64); @@ -1464,7 +1464,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.nxv16f32.i64( %0, %1, i64 %2) @@ -1472,7 +1472,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f32.nxv16i32( +declare @llvm.riscv.vrgather.vv.mask.nxv16f32.i64( , , , @@ -1488,7 +1488,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call 
@llvm.riscv.vrgather.mask.nxv16f32.nxv16i32( + %a = call @llvm.riscv.vrgather.vv.mask.nxv16f32.i64( %0, %1, %2, @@ -1498,7 +1498,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f64.nxv1i64( +declare @llvm.riscv.vrgather.vv.nxv1f64.i64( , , i64); @@ -1511,7 +1511,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f64.nxv1i64( + %a = call @llvm.riscv.vrgather.vv.nxv1f64.i64( %0, %1, i64 %2) @@ -1519,7 +1519,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f64.nxv1i64( +declare @llvm.riscv.vrgather.vv.mask.nxv1f64.i64( , , , @@ -1533,7 +1533,7 @@ ; CHECK-NEXT: vrgather.vv v8, v9, v10, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f64.nxv1i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv1f64.i64( %0, %1, %2, @@ -1543,7 +1543,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f64.nxv2i64( +declare @llvm.riscv.vrgather.vv.nxv2f64.i64( , , i64); @@ -1556,7 +1556,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f64.nxv2i64( + %a = call @llvm.riscv.vrgather.vv.nxv2f64.i64( %0, %1, i64 %2) @@ -1564,7 +1564,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f64.nxv2i64( +declare @llvm.riscv.vrgather.vv.mask.nxv2f64.i64( , , , @@ -1578,7 +1578,7 @@ ; CHECK-NEXT: vrgather.vv v8, v10, v12, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f64.nxv2i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv2f64.i64( %0, %1, %2, @@ -1588,7 +1588,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f64.nxv4i64( +declare @llvm.riscv.vrgather.vv.nxv4f64.i64( , , i64); @@ -1601,7 +1601,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f64.nxv4i64( + %a = call @llvm.riscv.vrgather.vv.nxv4f64.i64( %0, %1, i64 %2) @@ -1609,7 +1609,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f64.nxv4i64( +declare @llvm.riscv.vrgather.vv.mask.nxv4f64.i64( , , , @@ -1623,7 +1623,7 @@ ; CHECK-NEXT: vrgather.vv v8, v12, v16, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f64.nxv4i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv4f64.i64( %0, %1, %2, @@ -1633,7 +1633,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f64.nxv8i64( +declare @llvm.riscv.vrgather.vv.nxv8f64.i64( , , i64); @@ -1646,7 +1646,7 @@ ; CHECK-NEXT: vmv8r.v v8, v24 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f64.nxv8i64( + %a = call @llvm.riscv.vrgather.vv.nxv8f64.i64( %0, %1, i64 %2) @@ -1654,7 +1654,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f64.nxv8i64( +declare @llvm.riscv.vrgather.vv.mask.nxv8f64.i64( , , , @@ -1670,7 +1670,7 @@ ; CHECK-NEXT: vrgather.vv v8, v16, v24, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f64.nxv8i64( + %a = call @llvm.riscv.vrgather.vv.mask.nxv8f64.i64( %0, %1, %2, @@ -1680,7 +1680,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i8.i64( +declare @llvm.riscv.vrgather.vx.nxv1i8.i64( , i64, i64); @@ -1693,7 +1693,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i8.i64( + %a = call @llvm.riscv.vrgather.vx.nxv1i8.i64( %0, i64 %1, i64 %2) @@ -1701,7 +1701,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i8.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv1i8.i64( , , i64, @@ -1715,7 +1715,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i8.i64( 
+ %a = call @llvm.riscv.vrgather.vx.mask.nxv1i8.i64( %0, %1, i64 %2, @@ -1725,7 +1725,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i8.i64( +declare @llvm.riscv.vrgather.vx.nxv2i8.i64( , i64, i64); @@ -1738,7 +1738,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i8.i64( + %a = call @llvm.riscv.vrgather.vx.nxv2i8.i64( %0, i64 %1, i64 %2) @@ -1746,7 +1746,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i8.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv2i8.i64( , , i64, @@ -1760,7 +1760,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i8.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i8.i64( %0, %1, i64 %2, @@ -1770,7 +1770,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i8.i64( +declare @llvm.riscv.vrgather.vx.nxv4i8.i64( , i64, i64); @@ -1783,7 +1783,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i8.i64( + %a = call @llvm.riscv.vrgather.vx.nxv4i8.i64( %0, i64 %1, i64 %2) @@ -1791,7 +1791,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i8.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv4i8.i64( , , i64, @@ -1805,7 +1805,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i8.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i8.i64( %0, %1, i64 %2, @@ -1815,7 +1815,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i8.i64( +declare @llvm.riscv.vrgather.vx.nxv8i8.i64( , i64, i64); @@ -1828,7 +1828,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i8.i64( + %a = call @llvm.riscv.vrgather.vx.nxv8i8.i64( %0, i64 %1, i64 %2) @@ -1836,7 +1836,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i8.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv8i8.i64( , , i64, @@ -1850,7 +1850,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i8.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i8.i64( %0, %1, i64 %2, @@ -1860,7 +1860,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i8.i64( +declare @llvm.riscv.vrgather.vx.nxv16i8.i64( , i64, i64); @@ -1873,7 +1873,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i8.i64( + %a = call @llvm.riscv.vrgather.vx.nxv16i8.i64( %0, i64 %1, i64 %2) @@ -1881,7 +1881,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i8.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv16i8.i64( , , i64, @@ -1895,7 +1895,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i8.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i8.i64( %0, %1, i64 %2, @@ -1905,7 +1905,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i8.i64( +declare @llvm.riscv.vrgather.vx.nxv32i8.i64( , i64, i64); @@ -1918,7 +1918,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i8.i64( + %a = call @llvm.riscv.vrgather.vx.nxv32i8.i64( %0, i64 %1, i64 %2) @@ -1926,7 +1926,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i8.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv32i8.i64( , , i64, @@ -1940,7 +1940,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i8.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32i8.i64( %0, %1, i64 %2, @@ 
-1950,7 +1950,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv64i8.i64( +declare @llvm.riscv.vrgather.vx.nxv64i8.i64( , i64, i64); @@ -1963,7 +1963,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv64i8.i64( + %a = call @llvm.riscv.vrgather.vx.nxv64i8.i64( %0, i64 %1, i64 %2) @@ -1971,7 +1971,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv64i8.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv64i8.i64( , , i64, @@ -1985,7 +1985,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv64i8.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv64i8.i64( %0, %1, i64 %2, @@ -1995,7 +1995,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i16.i64( +declare @llvm.riscv.vrgather.vx.nxv1i16.i64( , i64, i64); @@ -2008,7 +2008,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv1i16.i64( %0, i64 %1, i64 %2) @@ -2016,7 +2016,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv1i16.i64( , , i64, @@ -2030,7 +2030,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i16.i64( %0, %1, i64 %2, @@ -2040,7 +2040,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i16.i64( +declare @llvm.riscv.vrgather.vx.nxv2i16.i64( , i64, i64); @@ -2053,7 +2053,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv2i16.i64( %0, i64 %1, i64 %2) @@ -2061,7 +2061,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv2i16.i64( , , i64, @@ -2075,7 +2075,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i16.i64( %0, %1, i64 %2, @@ -2085,7 +2085,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i16.i64( +declare @llvm.riscv.vrgather.vx.nxv4i16.i64( , i64, i64); @@ -2098,7 +2098,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv4i16.i64( %0, i64 %1, i64 %2) @@ -2106,7 +2106,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv4i16.i64( , , i64, @@ -2120,7 +2120,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i16.i64( %0, %1, i64 %2, @@ -2130,7 +2130,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i16.i64( +declare @llvm.riscv.vrgather.vx.nxv8i16.i64( , i64, i64); @@ -2143,7 +2143,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv8i16.i64( %0, i64 %1, i64 %2) @@ -2151,7 +2151,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv8i16.i64( , , i64, @@ -2165,7 +2165,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i16.i64( %0, %1, i64 %2, @@ -2175,7 +2175,7 @@ ret %a } -declare 
@llvm.riscv.vrgather.nxv16i16.i64( +declare @llvm.riscv.vrgather.vx.nxv16i16.i64( , i64, i64); @@ -2188,7 +2188,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv16i16.i64( %0, i64 %1, i64 %2) @@ -2196,7 +2196,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv16i16.i64( , , i64, @@ -2210,7 +2210,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i16.i64( %0, %1, i64 %2, @@ -2220,7 +2220,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32i16.i64( +declare @llvm.riscv.vrgather.vx.nxv32i16.i64( , i64, i64); @@ -2233,7 +2233,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32i16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv32i16.i64( %0, i64 %1, i64 %2) @@ -2241,7 +2241,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32i16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv32i16.i64( , , i64, @@ -2255,7 +2255,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32i16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32i16.i64( %0, %1, i64 %2, @@ -2265,7 +2265,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i32.i64( +declare @llvm.riscv.vrgather.vx.nxv1i32.i64( , i64, i64); @@ -2278,7 +2278,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv1i32.i64( %0, i64 %1, i64 %2) @@ -2286,7 +2286,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv1i32.i64( , , i64, @@ -2300,7 +2300,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i32.i64( %0, %1, i64 %2, @@ -2310,7 +2310,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i32.i64( +declare @llvm.riscv.vrgather.vx.nxv2i32.i64( , i64, i64); @@ -2323,7 +2323,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv2i32.i64( %0, i64 %1, i64 %2) @@ -2331,7 +2331,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv2i32.i64( , , i64, @@ -2345,7 +2345,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i32.i64( %0, %1, i64 %2, @@ -2355,7 +2355,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i32.i64( +declare @llvm.riscv.vrgather.vx.nxv4i32.i64( , i64, i64); @@ -2368,7 +2368,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv4i32.i64( %0, i64 %1, i64 %2) @@ -2376,7 +2376,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv4i32.i64( , , i64, @@ -2390,7 +2390,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i32.i64( %0, %1, i64 %2, @@ -2400,7 +2400,7 @@ ret %a } -declare 
@llvm.riscv.vrgather.nxv8i32.i64( +declare @llvm.riscv.vrgather.vx.nxv8i32.i64( , i64, i64); @@ -2413,7 +2413,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv8i32.i64( %0, i64 %1, i64 %2) @@ -2421,7 +2421,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv8i32.i64( , , i64, @@ -2435,7 +2435,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i32.i64( %0, %1, i64 %2, @@ -2445,7 +2445,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16i32.i64( +declare @llvm.riscv.vrgather.vx.nxv16i32.i64( , i64, i64); @@ -2458,7 +2458,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16i32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv16i32.i64( %0, i64 %1, i64 %2) @@ -2466,7 +2466,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16i32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv16i32.i64( , , i64, @@ -2480,7 +2480,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16i32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16i32.i64( %0, %1, i64 %2, @@ -2490,7 +2490,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1i64.i64( +declare @llvm.riscv.vrgather.vx.nxv1i64.i64( , i64, i64); @@ -2503,7 +2503,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1i64.i64( + %a = call @llvm.riscv.vrgather.vx.nxv1i64.i64( %0, i64 %1, i64 %2) @@ -2511,7 +2511,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1i64.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv1i64.i64( , , i64, @@ -2525,7 +2525,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1i64.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1i64.i64( %0, %1, i64 %2, @@ -2535,7 +2535,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2i64.i64( +declare @llvm.riscv.vrgather.vx.nxv2i64.i64( , i64, i64); @@ -2548,7 +2548,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2i64.i64( + %a = call @llvm.riscv.vrgather.vx.nxv2i64.i64( %0, i64 %1, i64 %2) @@ -2556,7 +2556,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2i64.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv2i64.i64( , , i64, @@ -2570,7 +2570,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2i64.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2i64.i64( %0, %1, i64 %2, @@ -2580,7 +2580,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4i64.i64( +declare @llvm.riscv.vrgather.vx.nxv4i64.i64( , i64, i64); @@ -2593,7 +2593,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4i64.i64( + %a = call @llvm.riscv.vrgather.vx.nxv4i64.i64( %0, i64 %1, i64 %2) @@ -2601,7 +2601,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4i64.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv4i64.i64( , , i64, @@ -2615,7 +2615,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4i64.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4i64.i64( %0, %1, i64 %2, @@ -2625,7 +2625,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8i64.i64( 
+declare @llvm.riscv.vrgather.vx.nxv8i64.i64( , i64, i64); @@ -2638,7 +2638,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8i64.i64( + %a = call @llvm.riscv.vrgather.vx.nxv8i64.i64( %0, i64 %1, i64 %2) @@ -2646,7 +2646,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8i64.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv8i64.i64( , , i64, @@ -2660,7 +2660,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8i64.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8i64.i64( %0, %1, i64 %2, @@ -2670,7 +2670,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f16.i64( +declare @llvm.riscv.vrgather.vx.nxv1f16.i64( , i64, i64); @@ -2683,7 +2683,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv1f16.i64( %0, i64 %1, i64 %2) @@ -2691,7 +2691,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv1f16.i64( , , i64, @@ -2705,7 +2705,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f16.i64( %0, %1, i64 %2, @@ -2715,7 +2715,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f16.i64( +declare @llvm.riscv.vrgather.vx.nxv2f16.i64( , i64, i64); @@ -2728,7 +2728,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv2f16.i64( %0, i64 %1, i64 %2) @@ -2736,7 +2736,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv2f16.i64( , , i64, @@ -2750,7 +2750,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f16.i64( %0, %1, i64 %2, @@ -2760,7 +2760,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f16.i64( +declare @llvm.riscv.vrgather.vx.nxv4f16.i64( , i64, i64); @@ -2773,7 +2773,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv4f16.i64( %0, i64 %1, i64 %2) @@ -2781,7 +2781,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv4f16.i64( , , i64, @@ -2795,7 +2795,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f16.i64( %0, %1, i64 %2, @@ -2805,7 +2805,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f16.i64( +declare @llvm.riscv.vrgather.vx.nxv8f16.i64( , i64, i64); @@ -2818,7 +2818,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv8f16.i64( %0, i64 %1, i64 %2) @@ -2826,7 +2826,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv8f16.i64( , , i64, @@ -2840,7 +2840,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f16.i64( %0, %1, i64 %2, @@ -2850,7 +2850,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f16.i64( +declare @llvm.riscv.vrgather.vx.nxv16f16.i64( 
, i64, i64); @@ -2863,7 +2863,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv16f16.i64( %0, i64 %1, i64 %2) @@ -2871,7 +2871,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv16f16.i64( , , i64, @@ -2885,7 +2885,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16f16.i64( %0, %1, i64 %2, @@ -2895,7 +2895,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv32f16.i64( +declare @llvm.riscv.vrgather.vx.nxv32f16.i64( , i64, i64); @@ -2908,7 +2908,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv32f16.i64( + %a = call @llvm.riscv.vrgather.vx.nxv32f16.i64( %0, i64 %1, i64 %2) @@ -2916,7 +2916,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv32f16.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv32f16.i64( , , i64, @@ -2930,7 +2930,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv32f16.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv32f16.i64( %0, %1, i64 %2, @@ -2940,7 +2940,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f32.i64( +declare @llvm.riscv.vrgather.vx.nxv1f32.i64( , i64, i64); @@ -2953,7 +2953,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv1f32.i64( %0, i64 %1, i64 %2) @@ -2961,7 +2961,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv1f32.i64( , , i64, @@ -2975,7 +2975,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f32.i64( %0, %1, i64 %2, @@ -2985,7 +2985,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f32.i64( +declare @llvm.riscv.vrgather.vx.nxv2f32.i64( , i64, i64); @@ -2998,7 +2998,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv2f32.i64( %0, i64 %1, i64 %2) @@ -3006,7 +3006,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv2f32.i64( , , i64, @@ -3020,7 +3020,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f32.i64( %0, %1, i64 %2, @@ -3030,7 +3030,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f32.i64( +declare @llvm.riscv.vrgather.vx.nxv4f32.i64( , i64, i64); @@ -3043,7 +3043,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv4f32.i64( %0, i64 %1, i64 %2) @@ -3051,7 +3051,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv4f32.i64( , , i64, @@ -3065,7 +3065,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f32.i64( %0, %1, i64 %2, @@ -3075,7 +3075,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f32.i64( +declare @llvm.riscv.vrgather.vx.nxv8f32.i64( , i64, i64); @@ -3088,7 +3088,7 
@@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv8f32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv8f32.i64( %0, i64 %1, i64 %2) @@ -3096,7 +3096,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv8f32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv8f32.i64( , , i64, @@ -3110,7 +3110,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv8f32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv8f32.i64( %0, %1, i64 %2, @@ -3120,7 +3120,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv16f32.i64( +declare @llvm.riscv.vrgather.vx.nxv16f32.i64( , i64, i64); @@ -3133,7 +3133,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv16f32.i64( + %a = call @llvm.riscv.vrgather.vx.nxv16f32.i64( %0, i64 %1, i64 %2) @@ -3141,7 +3141,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv16f32.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv16f32.i64( , , i64, @@ -3155,7 +3155,7 @@ ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv16f32.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv16f32.i64( %0, %1, i64 %2, @@ -3165,7 +3165,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv1f64.i64( +declare @llvm.riscv.vrgather.vx.nxv1f64.i64( , i64, i64); @@ -3178,7 +3178,7 @@ ; CHECK-NEXT: vmv1r.v v8, v25 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv1f64.i64( + %a = call @llvm.riscv.vrgather.vx.nxv1f64.i64( %0, i64 %1, i64 %2) @@ -3186,7 +3186,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv1f64.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv1f64.i64( , , i64, @@ -3200,7 +3200,7 @@ ; CHECK-NEXT: vrgather.vx v8, v9, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv1f64.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv1f64.i64( %0, %1, i64 %2, @@ -3210,7 +3210,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv2f64.i64( +declare @llvm.riscv.vrgather.vx.nxv2f64.i64( , i64, i64); @@ -3223,7 +3223,7 @@ ; CHECK-NEXT: vmv2r.v v8, v26 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv2f64.i64( + %a = call @llvm.riscv.vrgather.vx.nxv2f64.i64( %0, i64 %1, i64 %2) @@ -3231,7 +3231,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv2f64.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv2f64.i64( , , i64, @@ -3245,7 +3245,7 @@ ; CHECK-NEXT: vrgather.vx v8, v10, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv2f64.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv2f64.i64( %0, %1, i64 %2, @@ -3255,7 +3255,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv4f64.i64( +declare @llvm.riscv.vrgather.vx.nxv4f64.i64( , i64, i64); @@ -3268,7 +3268,7 @@ ; CHECK-NEXT: vmv4r.v v8, v28 ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.nxv4f64.i64( + %a = call @llvm.riscv.vrgather.vx.nxv4f64.i64( %0, i64 %1, i64 %2) @@ -3276,7 +3276,7 @@ ret %a } -declare @llvm.riscv.vrgather.mask.nxv4f64.i64( +declare @llvm.riscv.vrgather.vx.mask.nxv4f64.i64( , , i64, @@ -3290,7 +3290,7 @@ ; CHECK-NEXT: vrgather.vx v8, v12, a0, v0.t ; CHECK-NEXT: jalr zero, 0(ra) entry: - %a = call @llvm.riscv.vrgather.mask.nxv4f64.i64( + %a = call @llvm.riscv.vrgather.vx.mask.nxv4f64.i64( %0, %1, i64 %2, @@ -3300,7 +3300,7 @@ ret %a } -declare @llvm.riscv.vrgather.nxv8f64.i64( +declare @llvm.riscv.vrgather.vx.nxv8f64.i64( , i64, i64); @@ -3313,7 +3313,7 @@ ; CHECK-NEXT: vmv8r.v v8, v16 ; 
CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.nxv8f64.i64(
+  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.vx.nxv8f64.i64(
     <vscale x 8 x double> %0,
     i64 %1,
     i64 %2)
@@ -3321,7 +3321,7 @@
   ret <vscale x 8 x double> %a
 }

-declare <vscale x 8 x double> @llvm.riscv.vrgather.mask.nxv8f64.i64(
+declare <vscale x 8 x double> @llvm.riscv.vrgather.vx.mask.nxv8f64.i64(
   <vscale x 8 x double>,
   <vscale x 8 x double>,
   i64,
@@ -3335,7 +3335,7 @@
 ; CHECK-NEXT: vrgather.vx v8, v16, a0, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.mask.nxv8f64.i64(
+  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.vx.mask.nxv8f64.i64(
     <vscale x 8 x double> %0,
     <vscale x 8 x double> %1,
     i64 %2,
@@ -3353,7 +3353,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i8> @llvm.riscv.vrgather.nxv1i8.i64(
+  %a = call <vscale x 1 x i8> @llvm.riscv.vrgather.vx.nxv1i8.i64(
     <vscale x 1 x i8> %0,
     i64 9,
     i64 %1)
@@ -3368,7 +3368,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i8> @llvm.riscv.vrgather.mask.nxv1i8.i64(
+  %a = call <vscale x 1 x i8> @llvm.riscv.vrgather.vx.mask.nxv1i8.i64(
     <vscale x 1 x i8> %0,
     <vscale x 1 x i8> %1,
     i64 9,
@@ -3386,7 +3386,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i8> @llvm.riscv.vrgather.nxv2i8.i64(
+  %a = call <vscale x 2 x i8> @llvm.riscv.vrgather.vx.nxv2i8.i64(
     <vscale x 2 x i8> %0,
     i64 9,
     i64 %1)
@@ -3401,7 +3401,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i8> @llvm.riscv.vrgather.mask.nxv2i8.i64(
+  %a = call <vscale x 2 x i8> @llvm.riscv.vrgather.vx.mask.nxv2i8.i64(
     <vscale x 2 x i8> %0,
     <vscale x 2 x i8> %1,
     i64 9,
@@ -3419,7 +3419,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i8> @llvm.riscv.vrgather.nxv4i8.i64(
+  %a = call <vscale x 4 x i8> @llvm.riscv.vrgather.vx.nxv4i8.i64(
     <vscale x 4 x i8> %0,
     i64 9,
     i64 %1)
@@ -3434,7 +3434,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i8> @llvm.riscv.vrgather.mask.nxv4i8.i64(
+  %a = call <vscale x 4 x i8> @llvm.riscv.vrgather.vx.mask.nxv4i8.i64(
     <vscale x 4 x i8> %0,
     <vscale x 4 x i8> %1,
     i64 9,
@@ -3452,7 +3452,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i8> @llvm.riscv.vrgather.nxv8i8.i64(
+  %a = call <vscale x 8 x i8> @llvm.riscv.vrgather.vx.nxv8i8.i64(
     <vscale x 8 x i8> %0,
     i64 9,
     i64 %1)
@@ -3467,7 +3467,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i8> @llvm.riscv.vrgather.mask.nxv8i8.i64(
+  %a = call <vscale x 8 x i8> @llvm.riscv.vrgather.vx.mask.nxv8i8.i64(
     <vscale x 8 x i8> %0,
     <vscale x 8 x i8> %1,
     i64 9,
@@ -3485,7 +3485,7 @@
 ; CHECK-NEXT: vmv2r.v v8, v26
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x i8> @llvm.riscv.vrgather.nxv16i8.i64(
+  %a = call <vscale x 16 x i8> @llvm.riscv.vrgather.vx.nxv16i8.i64(
     <vscale x 16 x i8> %0,
     i64 9,
     i64 %1)
@@ -3500,7 +3500,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x i8> @llvm.riscv.vrgather.mask.nxv16i8.i64(
+  %a = call <vscale x 16 x i8> @llvm.riscv.vrgather.vx.mask.nxv16i8.i64(
     <vscale x 16 x i8> %0,
     <vscale x 16 x i8> %1,
     i64 9,
@@ -3518,7 +3518,7 @@
 ; CHECK-NEXT: vmv4r.v v8, v28
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 32 x i8> @llvm.riscv.vrgather.nxv32i8.i64(
+  %a = call <vscale x 32 x i8> @llvm.riscv.vrgather.vx.nxv32i8.i64(
     <vscale x 32 x i8> %0,
     i64 9,
     i64 %1)
@@ -3533,7 +3533,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 32 x i8> @llvm.riscv.vrgather.mask.nxv32i8.i64(
+  %a = call <vscale x 32 x i8> @llvm.riscv.vrgather.vx.mask.nxv32i8.i64(
     <vscale x 32 x i8> %0,
     <vscale x 32 x i8> %1,
     i64 9,
@@ -3551,7 +3551,7 @@
 ; CHECK-NEXT: vmv8r.v v8, v16
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 64 x i8> @llvm.riscv.vrgather.nxv64i8.i64(
+  %a = call <vscale x 64 x i8> @llvm.riscv.vrgather.vx.nxv64i8.i64(
     <vscale x 64 x i8> %0,
     i64 9,
     i64 %1)
@@ -3566,7 +3566,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 64 x i8> @llvm.riscv.vrgather.mask.nxv64i8.i64(
+  %a = call <vscale x 64 x i8> @llvm.riscv.vrgather.vx.mask.nxv64i8.i64(
     <vscale x 64 x i8> %0,
     <vscale x 64 x i8> %1,
     i64 9,
@@ -3584,7 +3584,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i16> @llvm.riscv.vrgather.nxv1i16.i64(
+  %a = call <vscale x 1 x i16> @llvm.riscv.vrgather.vx.nxv1i16.i64(
     <vscale x 1 x i16> %0,
     i64 9,
     i64 %1)
@@ -3599,7 +3599,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i16> @llvm.riscv.vrgather.mask.nxv1i16.i64(
+  %a = call <vscale x 1 x i16> @llvm.riscv.vrgather.vx.mask.nxv1i16.i64(
     <vscale x 1 x i16> %0,
     <vscale x 1 x i16> %1,
     i64 9,
@@ -3617,7 +3617,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i16> @llvm.riscv.vrgather.nxv2i16.i64(
+  %a = call <vscale x 2 x i16> @llvm.riscv.vrgather.vx.nxv2i16.i64(
     <vscale x 2 x i16> %0,
     i64 9,
     i64 %1)
@@ -3632,7 +3632,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i16> @llvm.riscv.vrgather.mask.nxv2i16.i64(
+  %a = call <vscale x 2 x i16> @llvm.riscv.vrgather.vx.mask.nxv2i16.i64(
     <vscale x 2 x i16> %0,
     <vscale x 2 x i16> %1,
     i64 9,
@@ -3650,7 +3650,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i16> @llvm.riscv.vrgather.nxv4i16.i64(
+  %a = call <vscale x 4 x i16> @llvm.riscv.vrgather.vx.nxv4i16.i64(
     <vscale x 4 x i16> %0,
     i64 9,
     i64 %1)
@@ -3665,7 +3665,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i16> @llvm.riscv.vrgather.mask.nxv4i16.i64(
+  %a = call <vscale x 4 x i16> @llvm.riscv.vrgather.vx.mask.nxv4i16.i64(
     <vscale x 4 x i16> %0,
     <vscale x 4 x i16> %1,
     i64 9,
@@ -3683,7 +3683,7 @@
 ; CHECK-NEXT: vmv2r.v v8, v26
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i16> @llvm.riscv.vrgather.nxv8i16.i64(
+  %a = call <vscale x 8 x i16> @llvm.riscv.vrgather.vx.nxv8i16.i64(
     <vscale x 8 x i16> %0,
     i64 9,
     i64 %1)
@@ -3698,7 +3698,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i16> @llvm.riscv.vrgather.mask.nxv8i16.i64(
+  %a = call <vscale x 8 x i16> @llvm.riscv.vrgather.vx.mask.nxv8i16.i64(
     <vscale x 8 x i16> %0,
     <vscale x 8 x i16> %1,
     i64 9,
@@ -3716,7 +3716,7 @@
 ; CHECK-NEXT: vmv4r.v v8, v28
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x i16> @llvm.riscv.vrgather.nxv16i16.i64(
+  %a = call <vscale x 16 x i16> @llvm.riscv.vrgather.vx.nxv16i16.i64(
     <vscale x 16 x i16> %0,
     i64 9,
     i64 %1)
@@ -3731,7 +3731,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x i16> @llvm.riscv.vrgather.mask.nxv16i16.i64(
+  %a = call <vscale x 16 x i16> @llvm.riscv.vrgather.vx.mask.nxv16i16.i64(
     <vscale x 16 x i16> %0,
     <vscale x 16 x i16> %1,
     i64 9,
@@ -3749,7 +3749,7 @@
 ; CHECK-NEXT: vmv8r.v v8, v16
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 32 x i16> @llvm.riscv.vrgather.nxv32i16.i64(
+  %a = call <vscale x 32 x i16> @llvm.riscv.vrgather.vx.nxv32i16.i64(
     <vscale x 32 x i16> %0,
     i64 9,
     i64 %1)
@@ -3764,7 +3764,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 32 x i16> @llvm.riscv.vrgather.mask.nxv32i16.i64(
+  %a = call <vscale x 32 x i16> @llvm.riscv.vrgather.vx.mask.nxv32i16.i64(
     <vscale x 32 x i16> %0,
     <vscale x 32 x i16> %1,
     i64 9,
@@ -3782,7 +3782,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i32> @llvm.riscv.vrgather.nxv1i32.i64(
+  %a = call <vscale x 1 x i32> @llvm.riscv.vrgather.vx.nxv1i32.i64(
     <vscale x 1 x i32> %0,
     i64 9,
     i64 %1)
@@ -3797,7 +3797,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i32> @llvm.riscv.vrgather.mask.nxv1i32.i64(
+  %a = call <vscale x 1 x i32> @llvm.riscv.vrgather.vx.mask.nxv1i32.i64(
     <vscale x 1 x i32> %0,
     <vscale x 1 x i32> %1,
     i64 9,
@@ -3815,7 +3815,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i32> @llvm.riscv.vrgather.nxv2i32.i64(
+  %a = call <vscale x 2 x i32> @llvm.riscv.vrgather.vx.nxv2i32.i64(
     <vscale x 2 x i32> %0,
     i64 9,
     i64 %1)
@@ -3830,7 +3830,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i32> @llvm.riscv.vrgather.mask.nxv2i32.i64(
+  %a = call <vscale x 2 x i32> @llvm.riscv.vrgather.vx.mask.nxv2i32.i64(
     <vscale x 2 x i32> %0,
     <vscale x 2 x i32> %1,
     i64 9,
@@ -3848,7 +3848,7 @@
 ; CHECK-NEXT: vmv2r.v v8, v26
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i32> @llvm.riscv.vrgather.nxv4i32.i64(
+  %a = call <vscale x 4 x i32> @llvm.riscv.vrgather.vx.nxv4i32.i64(
     <vscale x 4 x i32> %0,
     i64 9,
     i64 %1)
@@ -3863,7 +3863,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i32> @llvm.riscv.vrgather.mask.nxv4i32.i64(
+  %a = call <vscale x 4 x i32> @llvm.riscv.vrgather.vx.mask.nxv4i32.i64(
     <vscale x 4 x i32> %0,
     <vscale x 4 x i32> %1,
     i64 9,
@@ -3881,7 +3881,7 @@
 ; CHECK-NEXT: vmv4r.v v8, v28
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i32> @llvm.riscv.vrgather.nxv8i32.i64(
+  %a = call <vscale x 8 x i32> @llvm.riscv.vrgather.vx.nxv8i32.i64(
     <vscale x 8 x i32> %0,
     i64 9,
     i64 %1)
@@ -3896,7 +3896,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i32> @llvm.riscv.vrgather.mask.nxv8i32.i64(
+  %a = call <vscale x 8 x i32> @llvm.riscv.vrgather.vx.mask.nxv8i32.i64(
     <vscale x 8 x i32> %0,
     <vscale x 8 x i32> %1,
     i64 9,
@@ -3914,7 +3914,7 @@
 ; CHECK-NEXT: vmv8r.v v8, v16
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x i32> @llvm.riscv.vrgather.nxv16i32.i64(
+  %a = call <vscale x 16 x i32> @llvm.riscv.vrgather.vx.nxv16i32.i64(
     <vscale x 16 x i32> %0,
     i64 9,
     i64 %1)
@@ -3929,7 +3929,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x i32> @llvm.riscv.vrgather.mask.nxv16i32.i64(
+  %a = call <vscale x 16 x i32> @llvm.riscv.vrgather.vx.mask.nxv16i32.i64(
     <vscale x 16 x i32> %0,
     <vscale x 16 x i32> %1,
     i64 9,
@@ -3947,7 +3947,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i64> @llvm.riscv.vrgather.nxv1i64.i64(
+  %a = call <vscale x 1 x i64> @llvm.riscv.vrgather.vx.nxv1i64.i64(
     <vscale x 1 x i64> %0,
     i64 9,
     i64 %1)
@@ -3962,7 +3962,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x i64> @llvm.riscv.vrgather.mask.nxv1i64.i64(
+  %a = call <vscale x 1 x i64> @llvm.riscv.vrgather.vx.mask.nxv1i64.i64(
     <vscale x 1 x i64> %0,
     <vscale x 1 x i64> %1,
     i64 9,
@@ -3980,7 +3980,7 @@
 ; CHECK-NEXT: vmv2r.v v8, v26
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i64> @llvm.riscv.vrgather.nxv2i64.i64(
+  %a = call <vscale x 2 x i64> @llvm.riscv.vrgather.vx.nxv2i64.i64(
     <vscale x 2 x i64> %0,
     i64 9,
     i64 %1)
@@ -3995,7 +3995,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x i64> @llvm.riscv.vrgather.mask.nxv2i64.i64(
+  %a = call <vscale x 2 x i64> @llvm.riscv.vrgather.vx.mask.nxv2i64.i64(
     <vscale x 2 x i64> %0,
     <vscale x 2 x i64> %1,
     i64 9,
@@ -4013,7 +4013,7 @@
 ; CHECK-NEXT: vmv4r.v v8, v28
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i64> @llvm.riscv.vrgather.nxv4i64.i64(
+  %a = call <vscale x 4 x i64> @llvm.riscv.vrgather.vx.nxv4i64.i64(
     <vscale x 4 x i64> %0,
     i64 9,
     i64 %1)
@@ -4028,7 +4028,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x i64> @llvm.riscv.vrgather.mask.nxv4i64.i64(
+  %a = call <vscale x 4 x i64> @llvm.riscv.vrgather.vx.mask.nxv4i64.i64(
     <vscale x 4 x i64> %0,
     <vscale x 4 x i64> %1,
     i64 9,
@@ -4046,7 +4046,7 @@
 ; CHECK-NEXT: vmv8r.v v8, v16
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i64> @llvm.riscv.vrgather.nxv8i64.i64(
+  %a = call <vscale x 8 x i64> @llvm.riscv.vrgather.vx.nxv8i64.i64(
     <vscale x 8 x i64> %0,
     i64 9,
     i64 %1)
@@ -4061,7 +4061,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x i64> @llvm.riscv.vrgather.mask.nxv8i64.i64(
+  %a = call <vscale x 8 x i64> @llvm.riscv.vrgather.vx.mask.nxv8i64.i64(
     <vscale x 8 x i64> %0,
     <vscale x 8 x i64> %1,
     i64 9,
@@ -4079,7 +4079,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x half> @llvm.riscv.vrgather.nxv1f16.i64(
+  %a = call <vscale x 1 x half> @llvm.riscv.vrgather.vx.nxv1f16.i64(
     <vscale x 1 x half> %0,
     i64 9,
     i64 %1)
@@ -4094,7 +4094,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x half> @llvm.riscv.vrgather.mask.nxv1f16.i64(
+  %a = call <vscale x 1 x half> @llvm.riscv.vrgather.vx.mask.nxv1f16.i64(
     <vscale x 1 x half> %0,
     <vscale x 1 x half> %1,
     i64 9,
@@ -4112,7 +4112,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x half> @llvm.riscv.vrgather.nxv2f16.i64(
+  %a = call <vscale x 2 x half> @llvm.riscv.vrgather.vx.nxv2f16.i64(
     <vscale x 2 x half> %0,
     i64 9,
     i64 %1)
@@ -4127,7 +4127,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x half> @llvm.riscv.vrgather.mask.nxv2f16.i64(
+  %a = call <vscale x 2 x half> @llvm.riscv.vrgather.vx.mask.nxv2f16.i64(
     <vscale x 2 x half> %0,
     <vscale x 2 x half> %1,
     i64 9,
@@ -4145,7 +4145,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x half> @llvm.riscv.vrgather.nxv4f16.i64(
+  %a = call <vscale x 4 x half> @llvm.riscv.vrgather.vx.nxv4f16.i64(
     <vscale x 4 x half> %0,
     i64 9,
     i64 %1)
@@ -4160,7 +4160,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x half> @llvm.riscv.vrgather.mask.nxv4f16.i64(
+  %a = call <vscale x 4 x half> @llvm.riscv.vrgather.vx.mask.nxv4f16.i64(
     <vscale x 4 x half> %0,
     <vscale x 4 x half> %1,
     i64 9,
@@ -4178,7 +4178,7 @@
 ; CHECK-NEXT: vmv2r.v v8, v26
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x half> @llvm.riscv.vrgather.nxv8f16.i64(
+  %a = call <vscale x 8 x half> @llvm.riscv.vrgather.vx.nxv8f16.i64(
     <vscale x 8 x half> %0,
     i64 9,
     i64 %1)
@@ -4193,7 +4193,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x half> @llvm.riscv.vrgather.mask.nxv8f16.i64(
+  %a = call <vscale x 8 x half> @llvm.riscv.vrgather.vx.mask.nxv8f16.i64(
     <vscale x 8 x half> %0,
     <vscale x 8 x half> %1,
     i64 9,
@@ -4211,7 +4211,7 @@
 ; CHECK-NEXT: vmv4r.v v8, v28
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x half> @llvm.riscv.vrgather.nxv16f16.i64(
+  %a = call <vscale x 16 x half> @llvm.riscv.vrgather.vx.nxv16f16.i64(
     <vscale x 16 x half> %0,
     i64 9,
     i64 %1)
@@ -4226,7 +4226,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x half> @llvm.riscv.vrgather.mask.nxv16f16.i64(
+  %a = call <vscale x 16 x half> @llvm.riscv.vrgather.vx.mask.nxv16f16.i64(
     <vscale x 16 x half> %0,
     <vscale x 16 x half> %1,
     i64 9,
@@ -4244,7 +4244,7 @@
 ; CHECK-NEXT: vmv8r.v v8, v16
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 32 x half> @llvm.riscv.vrgather.nxv32f16.i64(
+  %a = call <vscale x 32 x half> @llvm.riscv.vrgather.vx.nxv32f16.i64(
     <vscale x 32 x half> %0,
     i64 9,
     i64 %1)
@@ -4259,7 +4259,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 32 x half> @llvm.riscv.vrgather.mask.nxv32f16.i64(
+  %a = call <vscale x 32 x half> @llvm.riscv.vrgather.vx.mask.nxv32f16.i64(
     <vscale x 32 x half> %0,
     <vscale x 32 x half> %1,
     i64 9,
@@ -4277,7 +4277,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x float> @llvm.riscv.vrgather.nxv1f32.i64(
+  %a = call <vscale x 1 x float> @llvm.riscv.vrgather.vx.nxv1f32.i64(
     <vscale x 1 x float> %0,
     i64 9,
     i64 %1)
@@ -4292,7 +4292,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x float> @llvm.riscv.vrgather.mask.nxv1f32.i64(
+  %a = call <vscale x 1 x float> @llvm.riscv.vrgather.vx.mask.nxv1f32.i64(
     <vscale x 1 x float> %0,
     <vscale x 1 x float> %1,
     i64 9,
@@ -4310,7 +4310,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x float> @llvm.riscv.vrgather.nxv2f32.i64(
+  %a = call <vscale x 2 x float> @llvm.riscv.vrgather.vx.nxv2f32.i64(
     <vscale x 2 x float> %0,
     i64 9,
     i64 %1)
@@ -4325,7 +4325,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x float> @llvm.riscv.vrgather.mask.nxv2f32.i64(
+  %a = call <vscale x 2 x float> @llvm.riscv.vrgather.vx.mask.nxv2f32.i64(
     <vscale x 2 x float> %0,
     <vscale x 2 x float> %1,
     i64 9,
@@ -4343,7 +4343,7 @@
 ; CHECK-NEXT: vmv2r.v v8, v26
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x float> @llvm.riscv.vrgather.nxv4f32.i64(
+  %a = call <vscale x 4 x float> @llvm.riscv.vrgather.vx.nxv4f32.i64(
     <vscale x 4 x float> %0,
     i64 9,
     i64 %1)
@@ -4358,7 +4358,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x float> @llvm.riscv.vrgather.mask.nxv4f32.i64(
+  %a = call <vscale x 4 x float> @llvm.riscv.vrgather.vx.mask.nxv4f32.i64(
     <vscale x 4 x float> %0,
     <vscale x 4 x float> %1,
     i64 9,
@@ -4376,7 +4376,7 @@
 ; CHECK-NEXT: vmv4r.v v8, v28
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x float> @llvm.riscv.vrgather.nxv8f32.i64(
+  %a = call <vscale x 8 x float> @llvm.riscv.vrgather.vx.nxv8f32.i64(
     <vscale x 8 x float> %0,
     i64 9,
     i64 %1)
@@ -4391,7 +4391,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x float> @llvm.riscv.vrgather.mask.nxv8f32.i64(
+  %a = call <vscale x 8 x float> @llvm.riscv.vrgather.vx.mask.nxv8f32.i64(
     <vscale x 8 x float> %0,
     <vscale x 8 x float> %1,
     i64 9,
@@ -4409,7 +4409,7 @@
 ; CHECK-NEXT: vmv8r.v v8, v16
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x float> @llvm.riscv.vrgather.nxv16f32.i64(
+  %a = call <vscale x 16 x float> @llvm.riscv.vrgather.vx.nxv16f32.i64(
     <vscale x 16 x float> %0,
     i64 9,
     i64 %1)
@@ -4424,7 +4424,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 16 x float> @llvm.riscv.vrgather.mask.nxv16f32.i64(
+  %a = call <vscale x 16 x float> @llvm.riscv.vrgather.vx.mask.nxv16f32.i64(
     <vscale x 16 x float> %0,
     <vscale x 16 x float> %1,
     i64 9,
@@ -4442,7 +4442,7 @@
 ; CHECK-NEXT: vmv1r.v v8, v25
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x double> @llvm.riscv.vrgather.nxv1f64.i64(
+  %a = call <vscale x 1 x double> @llvm.riscv.vrgather.vx.nxv1f64.i64(
     <vscale x 1 x double> %0,
     i64 9,
     i64 %1)
@@ -4457,7 +4457,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v9, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 1 x double> @llvm.riscv.vrgather.mask.nxv1f64.i64(
+  %a = call <vscale x 1 x double> @llvm.riscv.vrgather.vx.mask.nxv1f64.i64(
     <vscale x 1 x double> %0,
     <vscale x 1 x double> %1,
     i64 9,
@@ -4475,7 +4475,7 @@
 ; CHECK-NEXT: vmv2r.v v8, v26
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x double> @llvm.riscv.vrgather.nxv2f64.i64(
+  %a = call <vscale x 2 x double> @llvm.riscv.vrgather.vx.nxv2f64.i64(
     <vscale x 2 x double> %0,
     i64 9,
     i64 %1)
@@ -4490,7 +4490,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v10, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 2 x double> @llvm.riscv.vrgather.mask.nxv2f64.i64(
+  %a = call <vscale x 2 x double> @llvm.riscv.vrgather.vx.mask.nxv2f64.i64(
     <vscale x 2 x double> %0,
     <vscale x 2 x double> %1,
     i64 9,
@@ -4508,7 +4508,7 @@
 ; CHECK-NEXT: vmv4r.v v8, v28
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x double> @llvm.riscv.vrgather.nxv4f64.i64(
+  %a = call <vscale x 4 x double> @llvm.riscv.vrgather.vx.nxv4f64.i64(
     <vscale x 4 x double> %0,
     i64 9,
     i64 %1)
@@ -4523,7 +4523,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v12, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 4 x double> @llvm.riscv.vrgather.mask.nxv4f64.i64(
+  %a = call <vscale x 4 x double> @llvm.riscv.vrgather.vx.mask.nxv4f64.i64(
     <vscale x 4 x double> %0,
     <vscale x 4 x double> %1,
     i64 9,
@@ -4541,7 +4541,7 @@
 ; CHECK-NEXT: vmv8r.v v8, v16
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.nxv8f64.i64(
+  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.vx.nxv8f64.i64(
     <vscale x 8 x double> %0,
     i64 9,
     i64 %1)
@@ -4556,7 +4556,7 @@
 ; CHECK-NEXT: vrgather.vi v8, v16, 9, v0.t
 ; CHECK-NEXT: jalr zero, 0(ra)
 entry:
-  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.mask.nxv8f64.i64(
+  %a = call <vscale x 8 x double> @llvm.riscv.vrgather.vx.mask.nxv8f64.i64(
     <vscale x 8 x double> %0,
     <vscale x 8 x double> %1,
     i64 9,
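
As a usage sketch of the renamed scalar-index form: after this patch the `.vx` infix, not operand-type overloading alone, selects the scalar-index gather. The standalone IR below is not part of the patch; the unmasked declare matches the signatures updated in the test above, and the function name vrgather_vx_example is hypothetical.

; Hypothetical sketch: gather element %idx of %v into every active lane.
declare <vscale x 1 x i8> @llvm.riscv.vrgather.vx.nxv1i8.i64(
  <vscale x 1 x i8>,
  i64,
  i64);

define <vscale x 1 x i8> @vrgather_vx_example(<vscale x 1 x i8> %v, i64 %idx, i64 %vl) {
entry:
  ; operand order: (source vector, scalar index, vl)
  %a = call <vscale x 1 x i8> @llvm.riscv.vrgather.vx.nxv1i8.i64(
    <vscale x 1 x i8> %v,
    i64 %idx,
    i64 %vl)
  ret <vscale x 1 x i8> %a
}

The masked variant seen throughout the test, @llvm.riscv.vrgather.vx.mask.nxv1i8.i64, additionally takes a maskedoff vector before the source and (assuming the usual mask convention for these intrinsics) a <vscale x 1 x i1> mask before the trailing vl.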