Index: llvm/include/llvm/IR/IntrinsicsAArch64.td
===================================================================
--- llvm/include/llvm/IR/IntrinsicsAArch64.td
+++ llvm/include/llvm/IR/IntrinsicsAArch64.td
@@ -1266,28 +1266,14 @@
 // Logical operations
 //
 
+def int_aarch64_sve_and : AdvSIMD_Pred2VectorArg_Intrinsic;
+def int_aarch64_sve_bic : AdvSIMD_Pred2VectorArg_Intrinsic;
 def int_aarch64_sve_cnot : AdvSIMD_Merged1VectorArg_Intrinsic;
+def int_aarch64_sve_eor : AdvSIMD_Pred2VectorArg_Intrinsic;
 def int_aarch64_sve_not : AdvSIMD_Merged1VectorArg_Intrinsic;
+def int_aarch64_sve_orr : AdvSIMD_Pred2VectorArg_Intrinsic;
 
-def int_aarch64_sve_and : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_or : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_xor : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_bic_base : AdvSIMD_2VectorArg_Intrinsic;
-def int_aarch64_sve_bic : AdvSIMD_Pred2VectorArg_Intrinsic;
-
-def int_aarch64_sve_eor : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_ands : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_bics : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_eors : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_orr : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_orn : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_nor : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_nand : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_orrs : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_orns : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_nors : AdvSIMD_Pred2VectorArg_Intrinsic;
-def int_aarch64_sve_nands : AdvSIMD_Pred2VectorArg_Intrinsic;
-
+// TODO: Deprecated and will be replaced by isel patterns.
 def int_aarch64_sve_orr_imm : AdvSIMD_1VectorArg_Imm64_Intrinsic;
 def int_aarch64_sve_eor_imm : AdvSIMD_1VectorArg_Imm64_Intrinsic;
 def int_aarch64_sve_and_imm : AdvSIMD_1VectorArg_Imm64_Intrinsic;
@@ -1447,6 +1433,13 @@
 // Predicate operations
 //
 
+def int_aarch64_sve_and_z : AdvSIMD_Pred2VectorArg_Intrinsic;
+def int_aarch64_sve_bic_z : AdvSIMD_Pred2VectorArg_Intrinsic;
+def int_aarch64_sve_eor_z : AdvSIMD_Pred2VectorArg_Intrinsic;
+def int_aarch64_sve_nand_z : AdvSIMD_Pred2VectorArg_Intrinsic;
+def int_aarch64_sve_nor_z : AdvSIMD_Pred2VectorArg_Intrinsic;
+def int_aarch64_sve_orn_z : AdvSIMD_Pred2VectorArg_Intrinsic;
+def int_aarch64_sve_orr_z : AdvSIMD_Pred2VectorArg_Intrinsic;
 def int_aarch64_sve_pfirst : AdvSIMD_Pred1VectorArg_Intrinsic;
 def int_aarch64_sve_pnext : AdvSIMD_Pred1VectorArg_Intrinsic;
 def int_aarch64_sve_punpkhi : AdvSIMD_SVE_PUNPKHI_Intrinsic;
Index: llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
===================================================================
--- llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
+++ llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
@@ -82,14 +82,14 @@
   defm AND_ZZZ : sve_int_bin_cons_log<0b00, "and", and>;
   defm ORR_ZZZ : sve_int_bin_cons_log<0b01, "orr", or>;
   defm EOR_ZZZ : sve_int_bin_cons_log<0b10, "eor", xor>;
-  defm BIC_ZZZ : sve_int_bin_cons_log<0b11, "bic", int_aarch64_sve_bic_base>;
+  defm BIC_ZZZ : sve_int_bin_cons_log<0b11, "bic", null_frag>;
 
   defm ADD_ZPmZ  : sve_int_bin_pred_arit_0<0b000, "add", int_aarch64_sve_add>;
   defm SUB_ZPmZ  : sve_int_bin_pred_arit_0<0b001, "sub", int_aarch64_sve_sub>;
   defm SUBR_ZPmZ : sve_int_bin_pred_arit_0<0b011, "subr", int_aarch64_sve_subr>;
 
-  defm ORR_ZPmZ : sve_int_bin_pred_log<0b000, "orr", int_aarch64_sve_or>;
-  defm EOR_ZPmZ : sve_int_bin_pred_log<0b001, "eor", int_aarch64_sve_xor>;
+  defm ORR_ZPmZ : sve_int_bin_pred_log<0b000, "orr", int_aarch64_sve_orr>;
+  defm EOR_ZPmZ : sve_int_bin_pred_log<0b001, "eor", int_aarch64_sve_eor>;
   defm AND_ZPmZ : sve_int_bin_pred_log<0b010, "and", int_aarch64_sve_and>;
   defm BIC_ZPmZ : sve_int_bin_pred_log<0b011, "bic", int_aarch64_sve_bic>;
@@ -298,21 +298,21 @@
   defm PFIRST : sve_int_pfirst<0b00000, "pfirst", int_aarch64_sve_pfirst>;
   defm PNEXT  : sve_int_pnext<0b00110, "pnext", int_aarch64_sve_pnext>;
 
-  defm AND_PPzPP   : sve_int_pred_log<0b0000, "and", int_aarch64_sve_and>;
-  defm BIC_PPzPP   : sve_int_pred_log<0b0001, "bic", int_aarch64_sve_bic>;
-  defm EOR_PPzPP   : sve_int_pred_log<0b0010, "eor", int_aarch64_sve_eor>;
+  defm AND_PPzPP   : sve_int_pred_log<0b0000, "and", int_aarch64_sve_and_z>;
+  defm BIC_PPzPP   : sve_int_pred_log<0b0001, "bic", int_aarch64_sve_bic_z>;
+  defm EOR_PPzPP   : sve_int_pred_log<0b0010, "eor", int_aarch64_sve_eor_z>;
   defm SEL_PPPP    : sve_int_pred_log<0b0011, "sel", vselect>;
-  defm ANDS_PPzPP  : sve_int_pred_log<0b0100, "ands", int_aarch64_sve_ands>;
-  defm BICS_PPzPP  : sve_int_pred_log<0b0101, "bics", int_aarch64_sve_bics>;
-  defm EORS_PPzPP  : sve_int_pred_log<0b0110, "eors", int_aarch64_sve_eors>;
-  defm ORR_PPzPP   : sve_int_pred_log<0b1000, "orr", int_aarch64_sve_orr>;
-  defm ORN_PPzPP   : sve_int_pred_log<0b1001, "orn", int_aarch64_sve_orn>;
-  defm NOR_PPzPP   : sve_int_pred_log<0b1010, "nor", int_aarch64_sve_nor>;
-  defm NAND_PPzPP  : sve_int_pred_log<0b1011, "nand", int_aarch64_sve_nand>;
-  defm ORRS_PPzPP  : sve_int_pred_log<0b1100, "orrs", int_aarch64_sve_orrs>;
-  defm ORNS_PPzPP  : sve_int_pred_log<0b1101, "orns", int_aarch64_sve_orns>;
-  defm NORS_PPzPP  : sve_int_pred_log<0b1110, "nors", int_aarch64_sve_nors>;
-  defm NANDS_PPzPP : sve_int_pred_log<0b1111, "nands", int_aarch64_sve_nands>;
+  defm ANDS_PPzPP  : sve_int_pred_log<0b0100, "ands", null_frag>;
+  defm BICS_PPzPP  : sve_int_pred_log<0b0101, "bics", null_frag>;
+  defm EORS_PPzPP  : sve_int_pred_log<0b0110, "eors", null_frag>;
+  defm ORR_PPzPP   : sve_int_pred_log<0b1000, "orr", int_aarch64_sve_orr_z>;
+  defm ORN_PPzPP   : sve_int_pred_log<0b1001, "orn", int_aarch64_sve_orn_z>;
+  defm NOR_PPzPP   : sve_int_pred_log<0b1010, "nor", int_aarch64_sve_nor_z>;
+  defm NAND_PPzPP  : sve_int_pred_log<0b1011, "nand", int_aarch64_sve_nand_z>;
+  defm ORRS_PPzPP  : sve_int_pred_log<0b1100, "orrs", null_frag>;
+  defm ORNS_PPzPP  : sve_int_pred_log<0b1101, "orns", null_frag>;
+  defm NORS_PPzPP  : sve_int_pred_log<0b1110, "nors", null_frag>;
+  defm NANDS_PPzPP : sve_int_pred_log<0b1111, "nands", null_frag>;
 
   defm CLASTA_RPZ : sve_int_perm_clast_rz<0, "clasta">;
   defm CLASTB_RPZ : sve_int_perm_clast_rz<1, "clastb">;
Index: llvm/test/CodeGen/AArch64/sve-int-log-pred.ll
===================================================================
--- llvm/test/CodeGen/AArch64/sve-int-log-pred.ll
+++ llvm/test/CodeGen/AArch64/sve-int-log-pred.ll
@@ -5,8 +5,8 @@
 ; CHECK: and z0.b, p0/m, z0.b, z1.b
 ; CHECK-NEXT: ret
   %out = call <vscale x 2 x i8> @llvm.aarch64.sve.and.nxv2i8(<vscale x 2 x i1> %pg,
-                                                            <vscale x 2 x i8> %a,
-                                                            <vscale x 2 x i8> %b)
+                                                             <vscale x 2 x i8> %a,
+                                                             <vscale x 2 x i8> %b)
   ret <vscale x 2 x i8> %out
 }
 
@@ -20,7 +20,6 @@
   ret <vscale x 2 x i16> %out
 }
 
-
 define <vscale x 2 x i32> @and_pred_i32(<vscale x 2 x i1> %pg, <vscale x 2 x i32> %a, <vscale x 2 x i32> %b) {
 ; CHECK-LABEL: and_pred_i32:
 ; CHECK: and z0.s, p0/m, z0.s, z1.s
@@ -41,14 +40,13 @@
   ret <vscale x 2 x i64> %out
 }
 
-
 define <vscale x 2 x i8> @or_pred_i8(<vscale x 2 x i1> %pg, <vscale x 2 x i8> %a, <vscale x 2 x i8> %b) {
 ; CHECK-LABEL: or_pred_i8:
 ; CHECK: orr z0.b, p0/m, z0.b, z1.b
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i8> @llvm.aarch64.sve.or.nxv2i8(<vscale x 2 x i1> %pg,
-                                                            <vscale x 2 x i8> %a,
-                                                            <vscale x 2 x i8> %b)
+  %out = call <vscale x 2 x i8> @llvm.aarch64.sve.orr.nxv2i8(<vscale x 2 x i1> %pg,
+                                                             <vscale x 2 x i8> %a,
+                                                             <vscale x 2 x i8> %b)
   ret <vscale x 2 x i8> %out
 }
 
@@ -56,18 +54,17 @@
 ; CHECK-LABEL: or_pred_i16:
 ; CHECK: orr z0.h, p0/m, z0.h, z1.h
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i16> @llvm.aarch64.sve.or.nxv2i16(<vscale x 2 x i1> %pg,
+  %out = call <vscale x 2 x i16> @llvm.aarch64.sve.orr.nxv2i16(<vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i16> %a,
                                                                <vscale x 2 x i16> %b)
   ret <vscale x 2 x i16> %out
 }
 
-
 define <vscale x 2 x i32> @or_pred_i32(<vscale x 2 x i1> %pg, <vscale x 2 x i32> %a, <vscale x 2 x i32> %b) {
 ; CHECK-LABEL: or_pred_i32:
 ; CHECK: orr z0.s, p0/m, z0.s, z1.s
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i32> @llvm.aarch64.sve.or.nxv2i32(<vscale x 2 x i1> %pg,
+  %out = call <vscale x 2 x i32> @llvm.aarch64.sve.orr.nxv2i32(<vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i32> %a,
                                                                <vscale x 2 x i32> %b)
   ret <vscale x 2 x i32> %out
@@ -77,20 +74,19 @@
 ; CHECK-LABEL: or_pred_i64:
 ; CHECK: orr z0.d, p0/m, z0.d, z1.d
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.or.nxv2i64(<vscale x 2 x i1> %pg,
+  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.orr.nxv2i64(<vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %a,
                                                                <vscale x 2 x i64> %b)
   ret <vscale x 2 x i64> %out
 }
 
-
 define <vscale x 2 x i8> @xor_pred_i8(<vscale x 2 x i1> %pg, <vscale x 2 x i8> %a, <vscale x 2 x i8> %b) {
 ; CHECK-LABEL: xor_pred_i8:
 ; CHECK: eor z0.b, p0/m, z0.b, z1.b
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i8> @llvm.aarch64.sve.xor.nxv2i8(<vscale x 2 x i1> %pg,
-                                                            <vscale x 2 x i8> %a,
-                                                            <vscale x 2 x i8> %b)
+  %out = call <vscale x 2 x i8> @llvm.aarch64.sve.eor.nxv2i8(<vscale x 2 x i1> %pg,
+                                                             <vscale x 2 x i8> %a,
+                                                             <vscale x 2 x i8> %b)
   ret <vscale x 2 x i8> %out
 }
 
@@ -98,18 +94,17 @@
 ; CHECK-LABEL: xor_pred_i16:
 ; CHECK: eor z0.h, p0/m, z0.h, z1.h
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i16> @llvm.aarch64.sve.xor.nxv2i16(<vscale x 2 x i1> %pg,
+  %out = call <vscale x 2 x i16> @llvm.aarch64.sve.eor.nxv2i16(<vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i16> %a,
                                                                <vscale x 2 x i16> %b)
   ret <vscale x 2 x i16> %out
 }
 
-
 define <vscale x 2 x i32> @xor_pred_i32(<vscale x 2 x i1> %pg, <vscale x 2 x i32> %a, <vscale x 2 x i32> %b) {
 ; CHECK-LABEL: xor_pred_i32:
 ; CHECK: eor z0.s, p0/m, z0.s, z1.s
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i32> @llvm.aarch64.sve.xor.nxv2i32(<vscale x 2 x i1> %pg,
+  %out = call <vscale x 2 x i32> @llvm.aarch64.sve.eor.nxv2i32(<vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i32> %a,
                                                                <vscale x 2 x i32> %b)
   ret <vscale x 2 x i32> %out
@@ -119,7 +114,7 @@
 ; CHECK-LABEL: xor_pred_i64:
 ; CHECK: eor z0.d, p0/m, z0.d, z1.d
 ; CHECK-NEXT: ret
-  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.xor.nxv2i64(<vscale x 2 x i1> %pg,
+  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.eor.nxv2i64(<vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %a,
                                                                <vscale x 2 x i64> %b)
   ret <vscale x 2 x i64> %out
@@ -130,8 +125,8 @@
 ; CHECK: bic z0.b, p0/m, z0.b, z1.b
 ; CHECK-NEXT: ret
   %out = call <vscale x 2 x i8> @llvm.aarch64.sve.bic.nxv2i8(<vscale x 2 x i1> %pg,
-                                                            <vscale x 2 x i8> %a,
-                                                            <vscale x 2 x i8> %b)
+                                                             <vscale x 2 x i8> %a,
+                                                             <vscale x 2 x i8> %b)
   ret <vscale x 2 x i8> %out
 }
 
@@ -170,14 +165,14 @@
 declare <vscale x 2 x i16> @llvm.aarch64.sve.and.nxv2i16(<vscale x 2 x i1>,<vscale x 2 x i16>,<vscale x 2 x i16>)
 declare <vscale x 2 x i32> @llvm.aarch64.sve.and.nxv2i32(<vscale x 2 x i1>,<vscale x 2 x i32>,<vscale x 2 x i32>)
 declare <vscale x 2 x i64> @llvm.aarch64.sve.and.nxv2i64(<vscale x 2 x i1>,<vscale x 2 x i64>,<vscale x 2 x i64>)
-declare <vscale x 2 x i8> @llvm.aarch64.sve.or.nxv2i8(<vscale x 2 x i1>,<vscale x 2 x i8>,<vscale x 2 x i8>)
-declare <vscale x 2 x i16> @llvm.aarch64.sve.or.nxv2i16(<vscale x 2 x i1>,<vscale x 2 x i16>,<vscale x 2 x i16>)
-declare <vscale x 2 x i32> @llvm.aarch64.sve.or.nxv2i32(<vscale x 2 x i1>,<vscale x 2 x i32>,<vscale x 2 x i32>)
-declare <vscale x 2 x i64> @llvm.aarch64.sve.or.nxv2i64(<vscale x 2 x i1>,<vscale x 2 x i64>,<vscale x 2 x i64>)
-declare <vscale x 2 x i8> @llvm.aarch64.sve.xor.nxv2i8(<vscale x 2 x i1>,<vscale x 2 x i8>,<vscale x 2 x i8>)
-declare <vscale x 2 x i16> @llvm.aarch64.sve.xor.nxv2i16(<vscale x 2 x i1>,<vscale x 2 x i16>,<vscale x 2 x i16>)
-declare <vscale x 2 x i32> @llvm.aarch64.sve.xor.nxv2i32(<vscale x 2 x i1>,<vscale x 2 x i32>,<vscale x 2 x i32>)
-declare <vscale x 2 x i64> @llvm.aarch64.sve.xor.nxv2i64(<vscale x 2 x i1>,<vscale x 2 x i64>,<vscale x 2 x i64>)
+declare <vscale x 2 x i8> @llvm.aarch64.sve.orr.nxv2i8(<vscale x 2 x i1>,<vscale x 2 x i8>,<vscale x 2 x i8>)
+declare <vscale x 2 x i16> @llvm.aarch64.sve.orr.nxv2i16(<vscale x 2 x i1>,<vscale x 2 x i16>,<vscale x 2 x i16>)
+declare <vscale x 2 x i32> @llvm.aarch64.sve.orr.nxv2i32(<vscale x 2 x i1>,<vscale x 2 x i32>,<vscale x 2 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.orr.nxv2i64(<vscale x 2 x i1>,<vscale x 2 x i64>,<vscale x 2 x i64>)
+declare <vscale x 2 x i8> @llvm.aarch64.sve.eor.nxv2i8(<vscale x 2 x i1>,<vscale x 2 x i8>,<vscale x 2 x i8>)
+declare <vscale x 2 x i16> @llvm.aarch64.sve.eor.nxv2i16(<vscale x 2 x i1>,<vscale x 2 x i16>,<vscale x 2 x i16>)
+declare <vscale x 2 x i32> @llvm.aarch64.sve.eor.nxv2i32(<vscale x 2 x i1>,<vscale x 2 x i32>,<vscale x 2 x i32>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.eor.nxv2i64(<vscale x 2 x i1>,<vscale x 2 x i64>,<vscale x 2 x i64>)
 declare <vscale x 2 x i8> @llvm.aarch64.sve.bic.nxv2i8(<vscale x 2 x i1>,<vscale x 2 x i8>,<vscale x 2 x i8>)
 declare <vscale x 2 x i16> @llvm.aarch64.sve.bic.nxv2i16(<vscale x 2 x i1>,<vscale x 2 x i16>,<vscale x 2 x i16>)
 declare <vscale x 2 x i32> @llvm.aarch64.sve.bic.nxv2i32(<vscale x 2 x i1>,<vscale x 2 x i32>,<vscale x 2 x i32>)
Index: llvm/test/CodeGen/AArch64/sve-int-log.ll
===================================================================
--- llvm/test/CodeGen/AArch64/sve-int-log.ll
+++ llvm/test/CodeGen/AArch64/sve-int-log.ll
@@ -94,45 +94,3 @@
   %res = xor <vscale x 16 x i8> %a, %b
   ret <vscale x 16 x i8> %res
 }
-
-define <vscale x 2 x i64> @bic_d(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
-; CHECK-LABEL: bic_d
-; CHECK: bic z0.d, z0.d, z1.d
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.bic.base.nxv2i64(<vscale x 2 x i64> %a,
-                                                                    <vscale x 2 x i64> %b)
-  ret <vscale x 2 x i64> %res
-}
-
-define <vscale x 4 x i32> @bic_s(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
-; CHECK-LABEL: bic_s
-; CHECK: bic z0.d, z0.d, z1.d
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.bic.base.nxv4i32(<vscale x 4 x i32> %a,
-                                                                    <vscale x 4 x i32> %b)
-  ret <vscale x 4 x i32> %res
-}
-
-define <vscale x 8 x i16> @bic_h(<vscale x 8 x i16> %a, <vscale x 8 x i16> %b) {
-; CHECK-LABEL: bic_h
-; CHECK: bic z0.d, z0.d, z1.d
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i16> @llvm.aarch64.sve.bic.base.nxv8i16(<vscale x 8 x i16> %a,
-                                                                    <vscale x 8 x i16> %b)
-
-  ret <vscale x 8 x i16> %res
-}
-
-define <vscale x 16 x i8> @bic_b(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
-; CHECK-LABEL: bic_b
-; CHECK: bic z0.d, z0.d, z1.d
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i8> @llvm.aarch64.sve.bic.base.nxv16i8(<vscale x 16 x i8> %a,
-                                                                    <vscale x 16 x i8> %b)
-  ret <vscale x 16 x i8> %res
-}
-
-declare <vscale x 2 x i64> @llvm.aarch64.sve.bic.base.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i64>)
-declare <vscale x 4 x i32> @llvm.aarch64.sve.bic.base.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i32>)
-declare <vscale x 8 x i16> @llvm.aarch64.sve.bic.base.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i16>)
-declare <vscale x 16 x i8> @llvm.aarch64.sve.bic.base.nxv16i8(<vscale x 16 x i8>, <vscale x 16 x i8>)
Index: llvm/test/CodeGen/AArch64/sve-pred-log.ll
===================================================================
--- llvm/test/CodeGen/AArch64/sve-pred-log.ll
+++ llvm/test/CodeGen/AArch64/sve-pred-log.ll
@@ -36,7 +36,7 @@
 ; CHECK-LABEL: and_16:
 ; CHECK: and p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.and.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
+  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
   ret <vscale x 16 x i1> %res;
 }
 
@@ -44,7 +44,7 @@
 ; CHECK-LABEL: and_8:
 ; CHECK: and p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.and.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
+  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.and.z.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
   ret <vscale x 8 x i1> %res;
 }
 
@@ -52,7 +52,7 @@
 ; CHECK-LABEL: and_4:
 ; CHECK: and p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.and.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
+  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.and.z.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
   ret <vscale x 4 x i1> %res;
 }
 
@@ -60,16 +60,15 @@
 ; CHECK-LABEL: and_2:
 ; CHECK: and p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.and.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
+  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.and.z.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
   ret <vscale x 2 x i1> %res;
 }
 
-
 define <vscale x 16 x i1> @bic_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
 ; CHECK-LABEL: bic_16:
 ; CHECK: bic p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.bic.pred.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
+  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.bic.z.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
   ret <vscale x 16 x i1> %res;
 }
 
@@ -77,7 +76,7 @@
 ; CHECK-LABEL: bic_8:
 ; CHECK: bic p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.bic.pred.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
+  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.bic.z.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
   ret <vscale x 8 x i1> %res;
 }
 
@@ -85,7 +84,7 @@
 ; CHECK-LABEL: bic_4:
 ; CHECK: bic p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.bic.pred.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
+  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.bic.z.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
   ret <vscale x 4 x i1> %res;
 }
 
@@ -93,7 +92,7 @@
 ; CHECK-LABEL: bic_2:
 ; CHECK: bic p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.bic.pred.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
+  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.bic.z.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
   ret <vscale x 2 x i1> %res;
 }
 
@@ -101,7 +100,7 @@
 ; CHECK-LABEL: eor_16:
 ; CHECK: eor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.eor.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
+  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.eor.z.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
   ret <vscale x 16 x i1> %res;
 }
 
@@ -109,7 +108,7 @@
 ; CHECK-LABEL: eor_8:
 ; CHECK: eor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.eor.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
+  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.eor.z.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
   ret <vscale x 8 x i1> %res;
 }
 
@@ -117,7 +116,7 @@
 ; CHECK-LABEL: eor_4:
 ; CHECK: eor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.eor.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
+  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.eor.z.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
   ret <vscale x 4 x i1> %res;
 }
 
@@ -125,116 +124,15 @@
 ; CHECK-LABEL: eor_2:
 ; CHECK: eor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.eor.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
-  ret <vscale x 2 x i1> %res;
-}
-
-define <vscale x 16 x i1> @ands_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
-; CHECK-LABEL: ands_16:
-; CHECK: ands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.ands.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
-  ret <vscale x 16 x i1> %res;
-}
-
-define <vscale x 8 x i1> @ands_8(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd) {
-; CHECK-LABEL: ands_8:
-; CHECK: ands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.ands.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
-  ret <vscale x 8 x i1> %res;
-}
-
-define <vscale x 4 x i1> @ands_4(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd) {
-; CHECK-LABEL: ands_4:
-; CHECK: ands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.ands.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
-  ret <vscale x 4 x i1> %res;
-}
-
-define <vscale x 2 x i1> @ands_2(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd) {
-; CHECK-LABEL: ands_2:
-; CHECK: ands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.ands.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
+  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.eor.z.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
   ret <vscale x 2 x i1> %res;
 }
-
-define <vscale x 16 x i1> @bics_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
-; CHECK-LABEL: bics_16:
-; CHECK: bics p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.bics.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
-  ret <vscale x 16 x i1> %res;
-}
-
-define <vscale x 8 x i1> @bics_8(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd) {
-; CHECK-LABEL: bics_8:
-; CHECK: bics p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.bics.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
-  ret <vscale x 8 x i1> %res;
-}
-
-define <vscale x 4 x i1> @bics_4(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd) {
-; CHECK-LABEL: bics_4:
-; CHECK: bics p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.bics.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
-  ret <vscale x 4 x i1> %res;
-}
-
-define <vscale x 2 x i1> @bics_2(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd) {
-; CHECK-LABEL: bics_2:
-; CHECK: bics p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.bics.nxv2i1(<vscale x 2 x i1> %Pg,
-                                                              <vscale x 2 x i1> %Pn,
-                                                              <vscale x 2 x i1> %Pd)
-  ret <vscale x 2 x i1> %res;
-}
-
-
-define <vscale x 16 x i1> @eors_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
-; CHECK-LABEL: eors_16:
-; CHECK: eors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.eors.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
-  ret <vscale x 16 x i1> %res;
-}
-
-define <vscale x 8 x i1> @eors_8(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd) {
-; CHECK-LABEL: eors_8:
-; CHECK: eors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.eors.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
-  ret <vscale x 8 x i1> %res;
-}
-
-define <vscale x 4 x i1> @eors_4(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd) {
-; CHECK-LABEL: eors_4:
-; CHECK: eors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.eors.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
-  ret <vscale x 4 x i1> %res;
-}
-
-define <vscale x 2 x i1> @eors_2(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd) {
-; CHECK-LABEL: eors_2:
-; CHECK: eors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.eors.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
-  ret <vscale x 2 x i1> %res;
-}
-
-
 
 define <vscale x 16 x i1> @orr_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
 ; CHECK-LABEL: orr_16:
 ; CHECK: orr p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.orr.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
+  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.orr.z.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
   ret <vscale x 16 x i1> %res;
 }
 
@@ -242,7 +140,7 @@
 ; CHECK-LABEL: orr_8:
 ; CHECK: orr p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.orr.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
+  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.orr.z.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
   ret <vscale x 8 x i1> %res;
 }
 
@@ -250,7 +148,7 @@
 ; CHECK-LABEL: orr_4:
 ; CHECK: orr p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.orr.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
+  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.orr.z.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
   ret <vscale x 4 x i1> %res;
 }
 
@@ -258,16 +156,15 @@
 ; CHECK-LABEL: orr_2:
 ; CHECK: orr p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.orr.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
+  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.orr.z.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
   ret <vscale x 2 x i1> %res;
 }
 
-
 define <vscale x 16 x i1> @orn_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
 ; CHECK-LABEL: orn_16:
 ; CHECK: orn p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.orn.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
+  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.orn.z.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
   ret <vscale x 16 x i1> %res;
 }
 
@@ -275,7 +172,7 @@
 ; CHECK-LABEL: orn_8:
 ; CHECK: orn p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.orn.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
+  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.orn.z.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
   ret <vscale x 8 x i1> %res;
 }
 
@@ -283,7 +180,7 @@
 ; CHECK-LABEL: orn_4:
 ; CHECK: orn p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.orn.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
+  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.orn.z.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
   ret <vscale x 4 x i1> %res;
 }
 
@@ -291,7 +188,7 @@
 ; CHECK-LABEL: orn_2:
 ; CHECK: orn p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.orn.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
+  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.orn.z.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
   ret <vscale x 2 x i1> %res;
 }
 
@@ -299,7 +196,7 @@
 ; CHECK-LABEL: nor_16:
 ; CHECK: nor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.nor.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
+  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.nor.z.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
   ret <vscale x 16 x i1> %res;
 }
 
@@ -307,7 +204,7 @@
 ; CHECK-LABEL: nor_8:
 ; CHECK: nor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.nor.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
+  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.nor.z.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
   ret <vscale x 8 x i1> %res;
 }
 
@@ -315,7 +212,7 @@
 ; CHECK-LABEL: nor_4:
 ; CHECK: nor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.nor.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
+  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.nor.z.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
   ret <vscale x 4 x i1> %res;
 }
 
@@ -323,7 +220,7 @@
 ; CHECK-LABEL: nor_2:
 ; CHECK: nor p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.nor.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
+  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.nor.z.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
   ret <vscale x 2 x i1> %res;
 }
 
@@ -331,7 +228,7 @@
 ; CHECK-LABEL: nand_16:
 ; CHECK: nand p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.nand.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
+  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.nand.z.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
   ret <vscale x 16 x i1> %res;
 }
 
@@ -339,7 +236,7 @@
 ; CHECK-LABEL: nand_8:
 ; CHECK: nand p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.nand.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
+  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.nand.z.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
   ret <vscale x 8 x i1> %res;
 }
 
@@ -347,7 +244,7 @@
 ; CHECK-LABEL: nand_4:
 ; CHECK: nand p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.nand.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
+  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.nand.z.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
   ret <vscale x 4 x i1> %res;
 }
 
@@ -355,191 +252,35 @@
 ; CHECK-LABEL: nand_2:
 ; CHECK: nand p0.b, p0/z, p1.b, p2.b
 ; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.nand.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
-  ret <vscale x 2 x i1> %res;
-}
-
-define <vscale x 16 x i1> @orrs_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
-; CHECK-LABEL: orrs_16:
-; CHECK: orrs p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.orrs.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
-  ret <vscale x 16 x i1> %res;
-}
-
-define <vscale x 8 x i1> @orrs_8(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd) {
-; CHECK-LABEL: orrs_8:
-; CHECK: orrs p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.orrs.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
-  ret <vscale x 8 x i1> %res;
-}
-
-define <vscale x 4 x i1> @orrs_4(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd) {
-; CHECK-LABEL: orrs_4:
-; CHECK: orrs p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.orrs.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
-  ret <vscale x 4 x i1> %res;
-}
-
-define <vscale x 2 x i1> @orrs_2(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd) {
-; CHECK-LABEL: orrs_2:
-; CHECK: orrs p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.orrs.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
-  ret <vscale x 2 x i1> %res;
-}
-
-define <vscale x 16 x i1> @orns_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
-; CHECK-LABEL: orns_16:
-; CHECK: orns p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.orns.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
-  ret <vscale x 16 x i1> %res;
-}
-
-define <vscale x 8 x i1> @orns_8(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd) {
-; CHECK-LABEL: orns_8:
-; CHECK: orns p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.orns.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
-  ret <vscale x 8 x i1> %res;
-}
-
-define <vscale x 4 x i1> @orns_4(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd) {
-; CHECK-LABEL: orns_4:
-; CHECK: orns p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.orns.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
-  ret <vscale x 4 x i1> %res;
-}
-
-define <vscale x 2 x i1> @orns_2(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd) {
-; CHECK-LABEL: orns_2:
-; CHECK: orns p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.orns.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
-  ret <vscale x 2 x i1> %res;
-}
-
-define <vscale x 16 x i1> @nors_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
-; CHECK-LABEL: nors_16:
-; CHECK: nors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.nors.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
-  ret <vscale x 16 x i1> %res;
-}
-
-define <vscale x 8 x i1> @nors_8(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd) {
-; CHECK-LABEL: nors_8:
-; CHECK: nors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.nors.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
-  ret <vscale x 8 x i1> %res;
-}
-
-define <vscale x 4 x i1> @nors_4(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd) {
-; CHECK-LABEL: nors_4:
-; CHECK: nors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.nors.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
-  ret <vscale x 4 x i1> %res;
-}
-
-define <vscale x 2 x i1> @nors_2(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd) {
-; CHECK-LABEL: nors_2:
-; CHECK: nors p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.nors.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
-  ret <vscale x 2 x i1> %res;
-}
-
-define <vscale x 16 x i1> @nands_16(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd) {
-; CHECK-LABEL: nands_16:
-; CHECK: nands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 16 x i1> @llvm.aarch64.sve.nands.nxv16i1(<vscale x 16 x i1> %Pg, <vscale x 16 x i1> %Pn, <vscale x 16 x i1> %Pd)
-  ret <vscale x 16 x i1> %res;
-}
-
-define <vscale x 8 x i1> @nands_8(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd) {
-; CHECK-LABEL: nands_8:
-; CHECK: nands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 8 x i1> @llvm.aarch64.sve.nands.nxv8i1(<vscale x 8 x i1> %Pg, <vscale x 8 x i1> %Pn, <vscale x 8 x i1> %Pd)
-  ret <vscale x 8 x i1> %res;
-}
-
-define <vscale x 4 x i1> @nands_4(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd) {
-; CHECK-LABEL: nands_4:
-; CHECK: nands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 4 x i1> @llvm.aarch64.sve.nands.nxv4i1(<vscale x 4 x i1> %Pg, <vscale x 4 x i1> %Pn, <vscale x 4 x i1> %Pd)
-  ret <vscale x 4 x i1> %res;
-}
-
-define <vscale x 2 x i1> @nands_2(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd) {
-; CHECK-LABEL: nands_2:
-; CHECK: nands p0.b, p0/z, p1.b, p2.b
-; CHECK-NEXT: ret
-  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.nands.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
+  %res = call <vscale x 2 x i1> @llvm.aarch64.sve.nand.z.nxv2i1(<vscale x 2 x i1> %Pg, <vscale x 2 x i1> %Pn, <vscale x 2 x i1> %Pd)
   ret <vscale x 2 x i1> %res;
 }
 
-declare <vscale x 16 x i1> @llvm.aarch64.sve.and.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.and.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.and.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.and.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.bic.pred.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.bic.pred.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.bic.pred.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.bic.pred.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.eor.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.eor.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.eor.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.eor.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.ands.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.ands.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.ands.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.ands.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.bics.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.bics.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.bics.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.bics.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.eors.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.eors.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.eors.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.eors.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.orr.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.orr.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.orr.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.orr.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.orn.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.orn.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.orn.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.orn.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.nor.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.nor.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.nor.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.nor.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.nand.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.nand.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.nand.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.nand.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.orrs.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.orrs.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.orrs.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.orrs.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.orns.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.orns.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.orns.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.orns.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.nors.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.nors.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.nors.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.nors.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.nands.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
-declare <vscale x 8 x i1> @llvm.aarch64.sve.nands.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
-declare <vscale x 4 x i1> @llvm.aarch64.sve.nands.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
-declare <vscale x 2 x i1> @llvm.aarch64.sve.nands.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 8 x i1> @llvm.aarch64.sve.and.z.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
+declare <vscale x 4 x i1> @llvm.aarch64.sve.and.z.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
+declare <vscale x 2 x i1> @llvm.aarch64.sve.and.z.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.bic.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 8 x i1> @llvm.aarch64.sve.bic.z.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
+declare <vscale x 4 x i1> @llvm.aarch64.sve.bic.z.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
+declare <vscale x 2 x i1> @llvm.aarch64.sve.bic.z.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.eor.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 8 x i1> @llvm.aarch64.sve.eor.z.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
+declare <vscale x 4 x i1> @llvm.aarch64.sve.eor.z.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
+declare <vscale x 2 x i1> @llvm.aarch64.sve.eor.z.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.orr.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 8 x i1> @llvm.aarch64.sve.orr.z.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
+declare <vscale x 4 x i1> @llvm.aarch64.sve.orr.z.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
+declare <vscale x 2 x i1> @llvm.aarch64.sve.orr.z.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.orn.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 8 x i1> @llvm.aarch64.sve.orn.z.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
+declare <vscale x 4 x i1> @llvm.aarch64.sve.orn.z.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
+declare <vscale x 2 x i1> @llvm.aarch64.sve.orn.z.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.nor.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 8 x i1> @llvm.aarch64.sve.nor.z.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
+declare <vscale x 4 x i1> @llvm.aarch64.sve.nor.z.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
+declare <vscale x 2 x i1> @llvm.aarch64.sve.nor.z.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.nand.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 8 x i1> @llvm.aarch64.sve.nand.z.nxv8i1(<vscale x 8 x i1>, <vscale x 8 x i1>, <vscale x 8 x i1>)
+declare <vscale x 4 x i1> @llvm.aarch64.sve.nand.z.nxv4i1(<vscale x 4 x i1>, <vscale x 4 x i1>, <vscale x 4 x i1>)
+declare <vscale x 2 x i1> @llvm.aarch64.sve.nand.z.nxv2i1(<vscale x 2 x i1>, <vscale x 2 x i1>, <vscale x 2 x i1>)