Index: include/llvm/IR/IntrinsicsAArch64.td
===================================================================
--- include/llvm/IR/IntrinsicsAArch64.td
+++ include/llvm/IR/IntrinsicsAArch64.td
@@ -775,6 +775,11 @@
                  llvm_anyvector_ty],
                 [IntrNoMem]>;
 
+  class AdvSIMD_SVE_Unpack_Intrinsic
+    : Intrinsic<[llvm_anyvector_ty],
+                [LLVMSubdivide2VectorType<0>],
+                [IntrNoMem]>;
+
   class AdvSIMD_SVE_PUNPKHI_Intrinsic
     : Intrinsic<[LLVMHalfElementsVectorType<0>],
                 [llvm_anyvector_ty],
@@ -806,6 +811,16 @@
 def int_aarch64_sve_cnt : AdvSIMD_SVE_CNT_Intrinsic;
 
 //
+// Permutations and selection
+//
+
+def int_aarch64_sve_sunpkhi : AdvSIMD_SVE_Unpack_Intrinsic;
+def int_aarch64_sve_sunpklo : AdvSIMD_SVE_Unpack_Intrinsic;
+
+def int_aarch64_sve_uunpkhi : AdvSIMD_SVE_Unpack_Intrinsic;
+def int_aarch64_sve_uunpklo : AdvSIMD_SVE_Unpack_Intrinsic;
+
+//
 // Floating-point comparisons
 //
Index: lib/Target/AArch64/AArch64ISelLowering.h
===================================================================
--- lib/Target/AArch64/AArch64ISelLowering.h
+++ lib/Target/AArch64/AArch64ISelLowering.h
@@ -191,6 +191,11 @@
   FRECPE, FRECPS, FRSQRTE, FRSQRTS,
 
+  SUNPKHI,
+  SUNPKLO,
+  UUNPKHI,
+  UUNPKLO,
+
   // NEON Load/Store with post-increment base updates
   LD2post = ISD::FIRST_TARGET_MEMORY_OPCODE,
   LD3post,
Index: lib/Target/AArch64/AArch64ISelLowering.cpp
===================================================================
--- lib/Target/AArch64/AArch64ISelLowering.cpp
+++ lib/Target/AArch64/AArch64ISelLowering.cpp
@@ -1300,6 +1300,10 @@
   case AArch64ISD::STZG:              return "AArch64ISD::STZG";
   case AArch64ISD::ST2G:              return "AArch64ISD::ST2G";
   case AArch64ISD::STZ2G:             return "AArch64ISD::STZ2G";
+  case AArch64ISD::SUNPKHI:           return "AArch64ISD::SUNPKHI";
+  case AArch64ISD::SUNPKLO:           return "AArch64ISD::SUNPKLO";
+  case AArch64ISD::UUNPKHI:           return "AArch64ISD::UUNPKHI";
+  case AArch64ISD::UUNPKLO:           return "AArch64ISD::UUNPKLO";
   }
   return nullptr;
 }
@@ -2838,6 +2842,19 @@
     return DAG.getNode(ISD::UMIN, dl, Op.getValueType(),
                        Op.getOperand(1), Op.getOperand(2));
 
+  case Intrinsic::aarch64_sve_sunpkhi:
+    return DAG.getNode(AArch64ISD::SUNPKHI, dl, Op.getValueType(),
+                       Op.getOperand(1));
+  case Intrinsic::aarch64_sve_sunpklo:
+    return DAG.getNode(AArch64ISD::SUNPKLO, dl, Op.getValueType(),
+                       Op.getOperand(1));
+  case Intrinsic::aarch64_sve_uunpkhi:
+    return DAG.getNode(AArch64ISD::UUNPKHI, dl, Op.getValueType(),
+                       Op.getOperand(1));
+  case Intrinsic::aarch64_sve_uunpklo:
+    return DAG.getNode(AArch64ISD::UUNPKLO, dl, Op.getValueType(),
+                       Op.getOperand(1));
+
   case Intrinsic::localaddress: {
     const auto &MF = DAG.getMachineFunction();
     const auto *RegInfo = Subtarget->getRegisterInfo();
Index: lib/Target/AArch64/AArch64InstrInfo.td
===================================================================
--- lib/Target/AArch64/AArch64InstrInfo.td
+++ lib/Target/AArch64/AArch64InstrInfo.td
@@ -421,6 +421,14 @@
 def AArch64st2g : SDNode<"AArch64ISD::ST2G", SDT_AArch64SETTAG, [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
 def AArch64stz2g : SDNode<"AArch64ISD::STZ2G", SDT_AArch64SETTAG, [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
 
+def SDT_AArch64unpk : SDTypeProfile<1, 1, [
+    SDTCisInt<0>, SDTCisInt<1>, SDTCisOpSmallerThanOp<1, 0>
+]>;
+def AArch64sunpkhi : SDNode<"AArch64ISD::SUNPKHI", SDT_AArch64unpk>;
+def AArch64sunpklo : SDNode<"AArch64ISD::SUNPKLO", SDT_AArch64unpk>;
+def AArch64uunpkhi : SDNode<"AArch64ISD::UUNPKHI", SDT_AArch64unpk>;
+def AArch64uunpklo : SDNode<"AArch64ISD::UUNPKLO", SDT_AArch64unpk>;
+
 //===----------------------------------------------------------------------===//
 //===----------------------------------------------------------------------===//
Index: lib/Target/AArch64/AArch64SVEInstrInfo.td
===================================================================
--- lib/Target/AArch64/AArch64SVEInstrInfo.td
+++ lib/Target/AArch64/AArch64SVEInstrInfo.td
@@ -211,10 +211,10 @@
   defm REV_PP : sve_int_perm_reverse_p<"rev">;
   defm REV_ZZ : sve_int_perm_reverse_z<"rev">;
 
-  defm SUNPKLO_ZZ : sve_int_perm_unpk<0b00, "sunpklo">;
-  defm SUNPKHI_ZZ : sve_int_perm_unpk<0b01, "sunpkhi">;
-  defm UUNPKLO_ZZ : sve_int_perm_unpk<0b10, "uunpklo">;
-  defm UUNPKHI_ZZ : sve_int_perm_unpk<0b11, "uunpkhi">;
+  defm SUNPKLO_ZZ : sve_int_perm_unpk<0b00, "sunpklo", AArch64sunpklo>;
+  defm SUNPKHI_ZZ : sve_int_perm_unpk<0b01, "sunpkhi", AArch64sunpkhi>;
+  defm UUNPKLO_ZZ : sve_int_perm_unpk<0b10, "uunpklo", AArch64uunpklo>;
+  defm UUNPKHI_ZZ : sve_int_perm_unpk<0b11, "uunpkhi", AArch64uunpkhi>;
 
   defm PUNPKLO_PP : sve_int_perm_punpk<0b0, "punpklo", int_aarch64_sve_punpklo>;
   defm PUNPKHI_PP : sve_int_perm_punpk<0b1, "punpkhi", int_aarch64_sve_punpkhi>;
Index: lib/Target/AArch64/SVEInstrFormats.td
===================================================================
--- lib/Target/AArch64/SVEInstrFormats.td
+++ lib/Target/AArch64/SVEInstrFormats.td
@@ -848,10 +848,14 @@
   let Inst{4-0} = Zd;
 }
 
-multiclass sve_int_perm_unpk<bits<2> opc, string asm> {
+multiclass sve_int_perm_unpk<bits<2> opc, string asm, SDPatternOperator op> {
   def _H : sve_int_perm_unpk<0b01, opc, asm, ZPR16, ZPR8>;
   def _S : sve_int_perm_unpk<0b10, opc, asm, ZPR32, ZPR16>;
   def _D : sve_int_perm_unpk<0b11, opc, asm, ZPR64, ZPR32>;
+
+  def : SVE_1_Op_Pat<nxv8i16, op, nxv16i8, !cast<Instruction>(NAME # _H)>;
+  def : SVE_1_Op_Pat<nxv4i32, op, nxv8i16, !cast<Instruction>(NAME # _S)>;
+  def : SVE_1_Op_Pat<nxv2i64, op, nxv4i32, !cast<Instruction>(NAME # _D)>;
 }
 
 class sve_int_perm_insrs<bits<2> sz8_64, string asm, ZPRRegOp zprty,
Index: test/CodeGen/AArch64/sve-intrinsics-perm-select.ll
===================================================================
--- /dev/null
+++ test/CodeGen/AArch64/sve-intrinsics-perm-select.ll
@@ -0,0 +1,129 @@
+; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve < %s | FileCheck %s
+
+;
+; SUNPKHI
+;
+
+define <vscale x 8 x i16> @sunpkhi_i16(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: sunpkhi_i16
+; CHECK: sunpkhi z0.h, z0.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sunpkhi.nxv8i16(<vscale x 16 x i8> %a)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sunpkhi_i32(<vscale x 8 x i16> %a) {
+; CHECK-LABEL: sunpkhi_i32
+; CHECK: sunpkhi z0.s, z0.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sunpkhi.nxv4i32(<vscale x 8 x i16> %a)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sunpkhi_i64(<vscale x 4 x i32> %a) {
+; CHECK-LABEL: sunpkhi_i64
+; CHECK: sunpkhi z0.d, z0.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sunpkhi.nxv2i64(<vscale x 4 x i32> %a)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; SUNPKLO
+;
+
+define <vscale x 8 x i16> @sunpklo_i16(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: sunpklo_i16
+; CHECK: sunpklo z0.h, z0.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.sunpklo.nxv8i16(<vscale x 16 x i8> %a)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @sunpklo_i32(<vscale x 8 x i16> %a) {
+; CHECK-LABEL: sunpklo_i32
+; CHECK: sunpklo z0.s, z0.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.sunpklo.nxv4i32(<vscale x 8 x i16> %a)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @sunpklo_i64(<vscale x 4 x i32> %a) {
+; CHECK-LABEL: sunpklo_i64
+; CHECK: sunpklo z0.d, z0.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.sunpklo.nxv2i64(<vscale x 4 x i32> %a)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; UUNPKHI
+;
+
+define <vscale x 8 x i16> @uunpkhi_i16(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: uunpkhi_i16
+; CHECK: uunpkhi z0.h, z0.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.uunpkhi.nxv8i16(<vscale x 16 x i8> %a)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @uunpkhi_i32(<vscale x 8 x i16> %a) {
+; CHECK-LABEL: uunpkhi_i32
+; CHECK: uunpkhi z0.s, z0.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.uunpkhi.nxv4i32(<vscale x 8 x i16> %a)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @uunpkhi_i64(<vscale x 4 x i32> %a) {
+; CHECK-LABEL: uunpkhi_i64
+; CHECK: uunpkhi z0.d, z0.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.uunpkhi.nxv2i64(<vscale x 4 x i32> %a)
+  ret <vscale x 2 x i64> %res
+}
+
+;
+; UUNPKLO
+;
+
+define <vscale x 8 x i16> @uunpklo_i16(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: uunpklo_i16
+; CHECK: uunpklo z0.h, z0.b
+; CHECK-NEXT: ret
+  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.uunpklo.nxv8i16(<vscale x 16 x i8> %a)
+  ret <vscale x 8 x i16> %res
+}
+
+define <vscale x 4 x i32> @uunpklo_i32(<vscale x 8 x i16> %a) {
+; CHECK-LABEL: uunpklo_i32
+; CHECK: uunpklo z0.s, z0.h
+; CHECK-NEXT: ret
+  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.uunpklo.nxv4i32(<vscale x 8 x i16> %a)
+  ret <vscale x 4 x i32> %res
+}
+
+define <vscale x 2 x i64> @uunpklo_i64(<vscale x 4 x i32> %a) {
+; CHECK-LABEL: uunpklo_i64
+; CHECK: uunpklo z0.d, z0.s
+; CHECK-NEXT: ret
+  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.uunpklo.nxv2i64(<vscale x 4 x i32> %a)
+  ret <vscale x 2 x i64> %res
+}
+
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sunpkhi.nxv8i16(<vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sunpkhi.nxv4i32(<vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sunpkhi.nxv2i64(<vscale x 4 x i32>)
+
+declare <vscale x 8 x i16> @llvm.aarch64.sve.sunpklo.nxv8i16(<vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sunpklo.nxv4i32(<vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.sunpklo.nxv2i64(<vscale x 4 x i32>)
+
+declare <vscale x 8 x i16> @llvm.aarch64.sve.uunpkhi.nxv8i16(<vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.uunpkhi.nxv4i32(<vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.uunpkhi.nxv2i64(<vscale x 4 x i32>)
+
+declare <vscale x 8 x i16> @llvm.aarch64.sve.uunpklo.nxv8i16(<vscale x 16 x i8>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.uunpklo.nxv4i32(<vscale x 8 x i16>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.uunpklo.nxv2i64(<vscale x 4 x i32>)
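
Usage note (illustrative, not part of the patch): each SVE unpack widens only half of the source elements, so the lo/hi intrinsics are intended to be used as a pair when a full vector needs extending. A minimal sketch, assuming the intrinsic signatures declared in the test above; with this patch applied, the two calls are expected to select to one uunpklo and one uunpkhi, followed by an ordinary vector add. The function name widen_and_add is hypothetical.

; Illustrative example only: zero-extend both halves of a scalable byte
; vector to halfwords and add them.
define <vscale x 8 x i16> @widen_and_add(<vscale x 16 x i8> %a) {
  %lo = call <vscale x 8 x i16> @llvm.aarch64.sve.uunpklo.nxv8i16(<vscale x 16 x i8> %a)
  %hi = call <vscale x 8 x i16> @llvm.aarch64.sve.uunpkhi.nxv8i16(<vscale x 16 x i8> %a)
  %sum = add <vscale x 8 x i16> %lo, %hi
  ret <vscale x 8 x i16> %sum
}

declare <vscale x 8 x i16> @llvm.aarch64.sve.uunpklo.nxv8i16(<vscale x 16 x i8>)
declare <vscale x 8 x i16> @llvm.aarch64.sve.uunpkhi.nxv8i16(<vscale x 16 x i8>)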