diff --git a/llvm/include/llvm/IR/IntrinsicsAArch64.td b/llvm/include/llvm/IR/IntrinsicsAArch64.td
--- a/llvm/include/llvm/IR/IntrinsicsAArch64.td
+++ b/llvm/include/llvm/IR/IntrinsicsAArch64.td
@@ -1816,6 +1816,7 @@
 def int_aarch64_sve_sqrshrunb : SVE2_1VectorArg_Imm_Narrowing_Intrinsic;
 def int_aarch64_sve_sqrshrunt : SVE2_2VectorArg_Imm_Narrowing_Intrinsic;
 
+// SVE2 MLA LANE.
 def int_aarch64_sve_smlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
 def int_aarch64_sve_smlalt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
 def int_aarch64_sve_umlalb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
@@ -1829,4 +1830,22 @@
 def int_aarch64_sve_sqdmlslb_lane : SVE2_3VectorArg_Indexed_Intrinsic;
 def int_aarch64_sve_sqdmlslt_lane : SVE2_3VectorArg_Indexed_Intrinsic;
 
+// SVE2 MLA Unpredicated.
+def int_aarch64_sve_smlalb : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_smlalt : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_umlalb : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_umlalt : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_smlslb : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_smlslt : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_umlslb : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_umlslt : SVE2_3VectorArg_Long_Intrinsic;
+
+def int_aarch64_sve_sqdmlalb : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_sqdmlalt : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_sqdmlslb : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_sqdmlslt : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_sqdmlalbt : SVE2_3VectorArg_Long_Intrinsic;
+def int_aarch64_sve_sqdmlslbt : SVE2_3VectorArg_Long_Intrinsic;
+
+
 }
diff --git a/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td b/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
--- a/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
+++ b/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
@@ -1477,14 +1477,14 @@
   defm UMLSLT_ZZZI : sve2_int_mla_long_by_indexed_elem<0b1111, "umlslt", int_aarch64_sve_umlslt_lane>;
 
   // SVE2 integer multiply-add long (vectors, unpredicated)
-  defm SMLALB_ZZZ : sve2_int_mla_long<0b10000, "smlalb">;
-  defm SMLALT_ZZZ : sve2_int_mla_long<0b10001, "smlalt">;
-  defm UMLALB_ZZZ : sve2_int_mla_long<0b10010, "umlalb">;
-  defm UMLALT_ZZZ : sve2_int_mla_long<0b10011, "umlalt">;
-  defm SMLSLB_ZZZ : sve2_int_mla_long<0b10100, "smlslb">;
-  defm SMLSLT_ZZZ : sve2_int_mla_long<0b10101, "smlslt">;
-  defm UMLSLB_ZZZ : sve2_int_mla_long<0b10110, "umlslb">;
-  defm UMLSLT_ZZZ : sve2_int_mla_long<0b10111, "umlslt">;
+  defm SMLALB_ZZZ : sve2_int_mla_long<0b10000, "smlalb", int_aarch64_sve_smlalb>;
+  defm SMLALT_ZZZ : sve2_int_mla_long<0b10001, "smlalt", int_aarch64_sve_smlalt>;
+  defm UMLALB_ZZZ : sve2_int_mla_long<0b10010, "umlalb", int_aarch64_sve_umlalb>;
+  defm UMLALT_ZZZ : sve2_int_mla_long<0b10011, "umlalt", int_aarch64_sve_umlalt>;
+  defm SMLSLB_ZZZ : sve2_int_mla_long<0b10100, "smlslb", int_aarch64_sve_smlslb>;
+  defm SMLSLT_ZZZ : sve2_int_mla_long<0b10101, "smlslt", int_aarch64_sve_smlslt>;
+  defm UMLSLB_ZZZ : sve2_int_mla_long<0b10110, "umlslb", int_aarch64_sve_umlslb>;
+  defm UMLSLT_ZZZ : sve2_int_mla_long<0b10111, "umlslt", int_aarch64_sve_umlslt>;
 
   // SVE2 saturating multiply-add long (indexed)
   defm SQDMLALB_ZZZI : sve2_int_mla_long_by_indexed_elem<0b0100, "sqdmlalb", int_aarch64_sve_sqdmlalb_lane>;
@@ -1493,14 +1493,14 @@
   defm SQDMLSLT_ZZZI : sve2_int_mla_long_by_indexed_elem<0b0111, "sqdmlslt", int_aarch64_sve_sqdmlslt_lane>;
 
   // SVE2 saturating multiply-add long (vectors, unpredicated)
-  defm SQDMLALB_ZZZ : sve2_int_mla_long<0b11000, "sqdmlalb">;
-  defm SQDMLALT_ZZZ : sve2_int_mla_long<0b11001, "sqdmlalt">;
-  defm SQDMLSLB_ZZZ : sve2_int_mla_long<0b11010, "sqdmlslb">;
-  defm SQDMLSLT_ZZZ : sve2_int_mla_long<0b11011, "sqdmlslt">;
+  defm SQDMLALB_ZZZ : sve2_int_mla_long<0b11000, "sqdmlalb", int_aarch64_sve_sqdmlalb>;
+  defm SQDMLALT_ZZZ : sve2_int_mla_long<0b11001, "sqdmlalt", int_aarch64_sve_sqdmlalt>;
+  defm SQDMLSLB_ZZZ : sve2_int_mla_long<0b11010, "sqdmlslb", int_aarch64_sve_sqdmlslb>;
+  defm SQDMLSLT_ZZZ : sve2_int_mla_long<0b11011, "sqdmlslt", int_aarch64_sve_sqdmlslt>;
 
   // SVE2 saturating multiply-add interleaved long
-  defm SQDMLALBT_ZZZ : sve2_int_mla_long<0b00010, "sqdmlalbt">;
-  defm SQDMLSLBT_ZZZ : sve2_int_mla_long<0b00011, "sqdmlslbt">;
+  defm SQDMLALBT_ZZZ : sve2_int_mla_long<0b00010, "sqdmlalbt", int_aarch64_sve_sqdmlalbt>;
+  defm SQDMLSLBT_ZZZ : sve2_int_mla_long<0b00011, "sqdmlslbt", int_aarch64_sve_sqdmlslbt>;
 
   // SVE2 integer halving add/subtract (predicated)
   defm SHADD_ZPmZ : sve2_int_arith_pred<0b100000, "shadd", int_aarch64_sve_shadd>;
diff --git a/llvm/lib/Target/AArch64/SVEInstrFormats.td b/llvm/lib/Target/AArch64/SVEInstrFormats.td
--- a/llvm/lib/Target/AArch64/SVEInstrFormats.td
+++ b/llvm/lib/Target/AArch64/SVEInstrFormats.td
@@ -2352,10 +2352,14 @@
   def : SVE_3_Op_Pat<nxv2i64, op, nxv2i64, nxv2i64, nxv2i64, !cast<Instruction>(NAME # _D)>;
 }
 
-multiclass sve2_int_mla_long<bits<5> opc, string asm> {
+multiclass sve2_int_mla_long<bits<5> opc, string asm, SDPatternOperator op> {
   def _H : sve2_int_mla<0b01, opc, asm, ZPR16, ZPR8>;
   def _S : sve2_int_mla<0b10, opc, asm, ZPR32, ZPR16>;
   def _D : sve2_int_mla<0b11, opc, asm, ZPR64, ZPR32>;
+
+  def : SVE_3_Op_Pat<nxv8i16, op, nxv8i16, nxv16i8, nxv16i8, !cast<Instruction>(NAME # _H)>;
+  def : SVE_3_Op_Pat<nxv4i32, op, nxv4i32, nxv8i16, nxv8i16, !cast<Instruction>(NAME # _S)>;
+  def : SVE_3_Op_Pat<nxv2i64, op, nxv2i64, nxv4i32, nxv4i32, !cast<Instruction>(NAME # _D)>;
 }
 
 //===----------------------------------------------------------------------===//
diff --git a/llvm/test/CodeGen/AArch64/sve2-mla-unpredicated.ll b/llvm/test/CodeGen/AArch64/sve2-mla-unpredicated.ll
new file
--- /dev/null
+++ b/llvm/test/CodeGen/AArch64/sve2-mla-unpredicated.ll
@@ -0,0 +1,590 @@
+; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve2 < %s | FileCheck %s
+
+;
+; SMLALB
+;
+define <vscale x 8 x i16> @smlalb_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: smlalb_i16
+; CHECK: smlalb z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.smlalb.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @smlalb_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: smlalb_i32
+; CHECK: smlalb z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.smlalb.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @smlalb_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: smlalb_i64
+; CHECK: smlalb z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.smlalb.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SMLALT
+;
+define <vscale x 8 x i16> @smlalt_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: smlalt_i16
+; CHECK: smlalt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.smlalt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @smlalt_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: smlalt_i32
+; CHECK: smlalt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.smlalt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @smlalt_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: smlalt_i64
+; CHECK: smlalt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.smlalt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; UMLALB
+;
+define <vscale x 8 x i16> @umlalb_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: umlalb_i16
+; CHECK: umlalb z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.umlalb.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @umlalb_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: umlalb_i32
+; CHECK: umlalb z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.umlalb.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @umlalb_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: umlalb_i64
+; CHECK: umlalb z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.umlalb.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; UMLALT
+;
+define <vscale x 8 x i16> @umlalt_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: umlalt_i16
+; CHECK: umlalt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.umlalt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @umlalt_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: umlalt_i32
+; CHECK: umlalt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.umlalt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @umlalt_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: umlalt_i64
+; CHECK: umlalt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.umlalt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SMLSLB
+;
+define <vscale x 8 x i16> @smlslb_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: smlslb_i16
+; CHECK: smlslb z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.smlslb.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @smlslb_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: smlslb_i32
+; CHECK: smlslb z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.smlslb.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @smlslb_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: smlslb_i64
+; CHECK: smlslb z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.smlslb.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SMLSLT
+;
+define <vscale x 8 x i16> @smlslt_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: smlslt_i16
+; CHECK: smlslt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.smlslt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @smlslt_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: smlslt_i32
+; CHECK: smlslt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.smlslt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @smlslt_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: smlslt_i64
+; CHECK: smlslt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.smlslt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; UMLSLB
+;
+define <vscale x 8 x i16> @umlslb_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: umlslb_i16
+; CHECK: umlslb z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.umlslb.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @umlslb_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: umlslb_i32
+; CHECK: umlslb z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.umlslb.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @umlslb_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: umlslb_i64
+; CHECK: umlslb z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.umlslb.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; UMLSLT
+;
+define <vscale x 8 x i16> @umlslt_i16(<vscale x 8 x i16> %a,
+                                      <vscale x 16 x i8> %b,
+                                      <vscale x 16 x i8> %c) {
+; CHECK-LABEL: umlslt_i16
+; CHECK: umlslt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.umlslt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                  <vscale x 16 x i8> %b,
+                                                                  <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @umlslt_i32(<vscale x 4 x i32> %a,
+                                      <vscale x 8 x i16> %b,
+                                      <vscale x 8 x i16> %c) {
+; CHECK-LABEL: umlslt_i32
+; CHECK: umlslt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.umlslt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                  <vscale x 8 x i16> %b,
+                                                                  <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @umlslt_i64(<vscale x 2 x i64> %a,
+                                      <vscale x 4 x i32> %b,
+                                      <vscale x 4 x i32> %c) {
+; CHECK-LABEL: umlslt_i64
+; CHECK: umlslt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.umlslt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                  <vscale x 4 x i32> %b,
+                                                                  <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SQDMLALB
+;
+define <vscale x 8 x i16> @sqdmlalb_i16(<vscale x 8 x i16> %a,
+                                        <vscale x 16 x i8> %b,
+                                        <vscale x 16 x i8> %c) {
+; CHECK-LABEL: sqdmlalb_i16
+; CHECK: sqdmlalb z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlalb.nxv8i16(<vscale x 8 x i16> %a,
+                                                                    <vscale x 16 x i8> %b,
+                                                                    <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sqdmlalb_i32(<vscale x 4 x i32> %a,
+                                        <vscale x 8 x i16> %b,
+                                        <vscale x 8 x i16> %c) {
+; CHECK-LABEL: sqdmlalb_i32
+; CHECK: sqdmlalb z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlalb.nxv4i32(<vscale x 4 x i32> %a,
+                                                                    <vscale x 8 x i16> %b,
+                                                                    <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sqdmlalb_i64(<vscale x 2 x i64> %a,
+                                        <vscale x 4 x i32> %b,
+                                        <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sqdmlalb_i64
+; CHECK: sqdmlalb z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlalb.nxv2i64(<vscale x 2 x i64> %a,
+                                                                    <vscale x 4 x i32> %b,
+                                                                    <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SQDMLALT
+;
+define <vscale x 8 x i16> @sqdmlalt_i16(<vscale x 8 x i16> %a,
+                                        <vscale x 16 x i8> %b,
+                                        <vscale x 16 x i8> %c) {
+; CHECK-LABEL: sqdmlalt_i16
+; CHECK: sqdmlalt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlalt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                    <vscale x 16 x i8> %b,
+                                                                    <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sqdmlalt_i32(<vscale x 4 x i32> %a,
+                                        <vscale x 8 x i16> %b,
+                                        <vscale x 8 x i16> %c) {
+; CHECK-LABEL: sqdmlalt_i32
+; CHECK: sqdmlalt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlalt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                    <vscale x 8 x i16> %b,
+                                                                    <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sqdmlalt_i64(<vscale x 2 x i64> %a,
+                                        <vscale x 4 x i32> %b,
+                                        <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sqdmlalt_i64
+; CHECK: sqdmlalt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlalt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                    <vscale x 4 x i32> %b,
+                                                                    <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SQDMLSLB
+;
+define <vscale x 8 x i16> @sqdmlslb_i16(<vscale x 8 x i16> %a,
+                                        <vscale x 16 x i8> %b,
+                                        <vscale x 16 x i8> %c) {
+; CHECK-LABEL: sqdmlslb_i16
+; CHECK: sqdmlslb z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlslb.nxv8i16(<vscale x 8 x i16> %a,
+                                                                    <vscale x 16 x i8> %b,
+                                                                    <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sqdmlslb_i32(<vscale x 4 x i32> %a,
+                                        <vscale x 8 x i16> %b,
+                                        <vscale x 8 x i16> %c) {
+; CHECK-LABEL: sqdmlslb_i32
+; CHECK: sqdmlslb z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlslb.nxv4i32(<vscale x 4 x i32> %a,
+                                                                    <vscale x 8 x i16> %b,
+                                                                    <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sqdmlslb_i64(<vscale x 2 x i64> %a,
+                                        <vscale x 4 x i32> %b,
+                                        <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sqdmlslb_i64
+; CHECK: sqdmlslb z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlslb.nxv2i64(<vscale x 2 x i64> %a,
+                                                                    <vscale x 4 x i32> %b,
+                                                                    <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SQDMLSLT
+;
+define <vscale x 8 x i16> @sqdmlslt_i16(<vscale x 8 x i16> %a,
+                                        <vscale x 16 x i8> %b,
+                                        <vscale x 16 x i8> %c) {
+; CHECK-LABEL: sqdmlslt_i16
+; CHECK: sqdmlslt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlslt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                    <vscale x 16 x i8> %b,
+                                                                    <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sqdmlslt_i32(<vscale x 4 x i32> %a,
+                                        <vscale x 8 x i16> %b,
+                                        <vscale x 8 x i16> %c) {
+; CHECK-LABEL: sqdmlslt_i32
+; CHECK: sqdmlslt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlslt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                    <vscale x 8 x i16> %b,
+                                                                    <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sqdmlslt_i64(<vscale x 2 x i64> %a,
+                                        <vscale x 4 x i32> %b,
+                                        <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sqdmlslt_i64
+; CHECK: sqdmlslt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlslt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                    <vscale x 4 x i32> %b,
+                                                                    <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SQDMLALBT
+;
+define <vscale x 8 x i16> @sqdmlalbt_i16(<vscale x 8 x i16> %a,
+                                         <vscale x 16 x i8> %b,
+                                         <vscale x 16 x i8> %c) {
+; CHECK-LABEL: sqdmlalbt_i16
+; CHECK: sqdmlalbt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlalbt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                     <vscale x 16 x i8> %b,
+                                                                     <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sqdmlalbt_i32(<vscale x 4 x i32> %a,
+                                         <vscale x 8 x i16> %b,
+                                         <vscale x 8 x i16> %c) {
+; CHECK-LABEL: sqdmlalbt_i32
+; CHECK: sqdmlalbt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlalbt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                     <vscale x 8 x i16> %b,
+                                                                     <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sqdmlalbt_i64(<vscale x 2 x i64> %a,
+                                         <vscale x 4 x i32> %b,
+                                         <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sqdmlalbt_i64
+; CHECK: sqdmlalbt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlalbt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                     <vscale x 4 x i32> %b,
+                                                                     <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SQDMLSLBT
+;
+define <vscale x 8 x i16> @sqdmlslbt_i16(<vscale x 8 x i16> %a,
+                                         <vscale x 16 x i8> %b,
+                                         <vscale x 16 x i8> %c) {
+; CHECK-LABEL: sqdmlslbt_i16
+; CHECK: sqdmlslbt z0.h, z1.b, z2.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlslbt.nxv8i16(<vscale x 8 x i16> %a,
+                                                                     <vscale x 16 x i8> %b,
+                                                                     <vscale x 16 x i8> %c)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sqdmlslbt_i32(<vscale x 4 x i32> %a,
+                                         <vscale x 8 x i16> %b,
+                                         <vscale x 8 x i16> %c) {
+; CHECK-LABEL: sqdmlslbt_i32
+; CHECK: sqdmlslbt z0.s, z1.h, z2.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlslbt.nxv4i32(<vscale x 4 x i32> %a,
+                                                                     <vscale x 8 x i16> %b,
+                                                                     <vscale x 8 x i16> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sqdmlslbt_i64(<vscale x 2 x i64> %a,
+                                         <vscale x 4 x i32> %b,
+                                         <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sqdmlslbt_i64
+; CHECK: sqdmlslbt z0.d, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlslbt.nxv2i64(<vscale x 2 x i64> %a,
+                                                                     <vscale x 4 x i32> %b,
+                                                                     <vscale x 4 x i32> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+declare <vscale x 8 x i16> @llvm.aarch64.sve.smlalb.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.smlalb.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.smlalb.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.smlalt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.smlalt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.smlalt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.umlalb.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.umlalb.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.umlalb.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.umlalt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.umlalt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.umlalt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.smlslb.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.smlslb.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.smlslb.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.smlslt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.smlslt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.smlslt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.umlslb.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.umlslb.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.umlslb.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.umlslt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.umlslt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.umlslt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlalb.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlalb.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlalb.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlalt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlalt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlalt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlslb.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlslb.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlslb.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlslt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlslt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlslt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlalbt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlalbt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlalbt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sqdmlslbt.nxv8i16(<vscale x 8 x i16>, <vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sqdmlslbt.nxv4i32(<vscale x 4 x i32>, <vscale x 8 x i16>, <vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sqdmlslbt.nxv2i64(<vscale x 2 x i64>, <vscale x 4 x i32>, <vscale x 4 x i32>)