diff --git a/llvm/include/llvm/IR/IntrinsicsAArch64.td b/llvm/include/llvm/IR/IntrinsicsAArch64.td
--- a/llvm/include/llvm/IR/IntrinsicsAArch64.td
+++ b/llvm/include/llvm/IR/IntrinsicsAArch64.td
@@ -1931,5 +1931,10 @@
 def int_aarch64_sve_sqdmlalbt : SVE2_3VectorArg_Long_Intrinsic;
 def int_aarch64_sve_sqdmlslbt : SVE2_3VectorArg_Long_Intrinsic;

+// SVE2 ADDSUB Long Unpredicated.
+def int_aarch64_sve_adclb : AdvSIMD_3VectorArg_Intrinsic;
+def int_aarch64_sve_adclt : AdvSIMD_3VectorArg_Intrinsic;
+def int_aarch64_sve_sbclb : AdvSIMD_3VectorArg_Intrinsic;
+def int_aarch64_sve_sbclt : AdvSIMD_3VectorArg_Intrinsic;
 }
diff --git a/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td b/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
--- a/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
+++ b/llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
@@ -1619,10 +1619,10 @@
   defm UABALT_ZZZ : sve2_int_absdiff_accum_long<0b11, "uabalt", int_aarch64_sve_uabalt>;

   // SVE2 integer add/subtract long with carry
-  defm ADCLB_ZZZ : sve2_int_addsub_long_carry<0b00, "adclb">;
-  defm ADCLT_ZZZ : sve2_int_addsub_long_carry<0b01, "adclt">;
-  defm SBCLB_ZZZ : sve2_int_addsub_long_carry<0b10, "sbclb">;
-  defm SBCLT_ZZZ : sve2_int_addsub_long_carry<0b11, "sbclt">;
+  defm ADCLB_ZZZ : sve2_int_addsub_long_carry<0b00, "adclb", int_aarch64_sve_adclb>;
+  defm ADCLT_ZZZ : sve2_int_addsub_long_carry<0b01, "adclt", int_aarch64_sve_adclt>;
+  defm SBCLB_ZZZ : sve2_int_addsub_long_carry<0b10, "sbclb", int_aarch64_sve_sbclb>;
+  defm SBCLT_ZZZ : sve2_int_addsub_long_carry<0b11, "sbclt", int_aarch64_sve_sbclt>;

   // SVE2 bitwise shift right narrow (bottom)
   defm SQSHRUNB_ZZI : sve2_int_bin_shift_imm_right_narrow_bottom<0b000, "sqshrunb", int_aarch64_sve_sqshrunb>;
diff --git a/llvm/lib/Target/AArch64/SVEInstrFormats.td b/llvm/lib/Target/AArch64/SVEInstrFormats.td
--- a/llvm/lib/Target/AArch64/SVEInstrFormats.td
+++ b/llvm/lib/Target/AArch64/SVEInstrFormats.td
@@ -3189,11 +3189,14 @@
   def : SVE_3_Op_Pat<nxv2i64, op, nxv2i64, nxv4i32, nxv4i32, !cast<Instruction>(NAME # _D)>;
 }

-multiclass sve2_int_addsub_long_carry<bits<2> opc, string asm> {
+multiclass sve2_int_addsub_long_carry<bits<2> opc, string asm, SDPatternOperator op> {
   def _S : sve2_int_absdiff_accum<{ opc{1}, 0b0 }, { 0b010, opc{0} }, asm,
                                   ZPR32, ZPR32>;
   def _D : sve2_int_absdiff_accum<{ opc{1}, 0b1 }, { 0b010, opc{0} }, asm,
                                   ZPR64, ZPR64>;
+
+  def : SVE_3_Op_Pat<nxv4i32, op, nxv4i32, nxv4i32, nxv4i32, !cast<Instruction>(NAME # _S)>;
+  def : SVE_3_Op_Pat<nxv2i64, op, nxv2i64, nxv2i64, nxv2i64, !cast<Instruction>(NAME # _D)>;
 }

 //===----------------------------------------------------------------------===//
diff --git a/llvm/test/CodeGen/AArch64/sve2-int-addsub-long.ll b/llvm/test/CodeGen/AArch64/sve2-int-addsub-long.ll
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/AArch64/sve2-int-addsub-long.ll
@@ -0,0 +1,102 @@
+; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve2 < %s | FileCheck %s
+
+;
+; ADCLB (vector, long, unpredicated)
+;
+define <vscale x 4 x i32> @adclb_i32(<vscale x 4 x i32> %a,
+                                     <vscale x 4 x i32> %b,
+                                     <vscale x 4 x i32> %c) {
+; CHECK-LABEL: adclb_i32
+; CHECK: adclb z0.s, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.adclb.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b, <vscale x 4 x i32> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @adclb_i64(<vscale x 2 x i64> %a,
+                                     <vscale x 2 x i64> %b,
+                                     <vscale x 2 x i64> %c) {
+; CHECK-LABEL: adclb_i64
+; CHECK: adclb z0.d, z1.d, z2.d
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.adclb.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i64> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; ADCLT (vector, long, unpredicated)
+;
+define <vscale x 4 x i32> @adclt_i32(<vscale x 4 x i32> %a,
+                                     <vscale x 4 x i32> %b,
+                                     <vscale x 4 x i32> %c) {
+; CHECK-LABEL: adclt_i32
+; CHECK: adclt z0.s, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.adclt.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b, <vscale x 4 x i32> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @adclt_i64(<vscale x 2 x i64> %a,
+                                     <vscale x 2 x i64> %b,
+                                     <vscale x 2 x i64> %c) {
+; CHECK-LABEL: adclt_i64
+; CHECK: adclt z0.d, z1.d, z2.d
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.adclt.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i64> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SBCLB (vector, long, unpredicated)
+;
+define <vscale x 4 x i32> @sbclb_i32(<vscale x 4 x i32> %a,
+                                     <vscale x 4 x i32> %b,
+                                     <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sbclb_i32
+; CHECK: sbclb z0.s, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sbclb.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b, <vscale x 4 x i32> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sbclb_i64(<vscale x 2 x i64> %a,
+                                     <vscale x 2 x i64> %b,
+                                     <vscale x 2 x i64> %c) {
+; CHECK-LABEL: sbclb_i64
+; CHECK: sbclb z0.d, z1.d, z2.d
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sbclb.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i64> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SBCLT (vector, long, unpredicated)
+;
+define <vscale x 4 x i32> @sbclt_i32(<vscale x 4 x i32> %a,
+                                     <vscale x 4 x i32> %b,
+                                     <vscale x 4 x i32> %c) {
+; CHECK-LABEL: sbclt_i32
+; CHECK: sbclt z0.s, z1.s, z2.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sbclt.nxv4i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b, <vscale x 4 x i32> %c)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sbclt_i64(<vscale x 2 x i64> %a,
+                                     <vscale x 2 x i64> %b,
+                                     <vscale x 2 x i64> %c) {
+; CHECK-LABEL: sbclt_i64
+; CHECK: sbclt z0.d, z1.d, z2.d
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sbclt.nxv2i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b, <vscale x 2 x i64> %c)
+  ret <vscale x 2 x i64> %res
+}
+
+declare <vscale x 4 x i32> @llvm.aarch64.sve.adclb.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.adclb.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.adclt.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.adclt.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sbclb.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sbclb.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sbclt.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sbclt.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>, <vscale x 2 x i64>)