diff --git a/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp b/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp
--- a/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp
+++ b/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp
@@ -402,6 +402,18 @@
   switch (IntrinsicID) {
   case Intrinsic::aarch64_sve_and_z:
     break;
+  case Intrinsic::aarch64_sve_bic_z:
+    break;
+  case Intrinsic::aarch64_sve_eor_z:
+    break;
+  case Intrinsic::aarch64_sve_nand_z:
+    break;
+  case Intrinsic::aarch64_sve_nor_z:
+    break;
+  case Intrinsic::aarch64_sve_orn_z:
+    break;
+  case Intrinsic::aarch64_sve_orr_z:
+    break;
   default:
     return None;
   }
diff --git a/llvm/test/Transforms/InstCombine/AArch64/sve-intrinsic-to-svbool-binops.ll b/llvm/test/Transforms/InstCombine/AArch64/sve-intrinsic-to-svbool-binops.ll
--- a/llvm/test/Transforms/InstCombine/AArch64/sve-intrinsic-to-svbool-binops.ll
+++ b/llvm/test/Transforms/InstCombine/AArch64/sve-intrinsic-to-svbool-binops.ll
@@ -52,12 +52,90 @@
   ret <vscale x 2 x i1> %t3
 }
 
+define <vscale x 8 x i1> @try_combine_svbool_binop_bic(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
+; CHECK-LABEL: @try_combine_svbool_binop_bic(
+; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.bic.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
+; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
+;
+  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
+  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.bic.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
+  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
+  ret <vscale x 8 x i1> %t3
+}
+
+define <vscale x 8 x i1> @try_combine_svbool_binop_eor(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
+; CHECK-LABEL: @try_combine_svbool_binop_eor(
+; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.eor.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
+; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
+;
+  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
+  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.eor.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
+  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
+  ret <vscale x 8 x i1> %t3
+}
+
+define <vscale x 8 x i1> @try_combine_svbool_binop_nand(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
+; CHECK-LABEL: @try_combine_svbool_binop_nand(
+; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.nand.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
+; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
+;
+  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
+  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.nand.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
+  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
+  ret <vscale x 8 x i1> %t3
+}
+
+define <vscale x 8 x i1> @try_combine_svbool_binop_nor(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
+; CHECK-LABEL: @try_combine_svbool_binop_nor(
+; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.nor.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
+; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
+;
+  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
+  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.nor.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
+  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
+  ret <vscale x 8 x i1> %t3
+}
+
+define <vscale x 8 x i1> @try_combine_svbool_binop_orn(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
+; CHECK-LABEL: @try_combine_svbool_binop_orn(
+; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.orn.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
+; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
+;
+  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
+  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.orn.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
+  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
+  ret <vscale x 8 x i1> %t3
+}
+
+define <vscale x 8 x i1> @try_combine_svbool_binop_orr(<vscale x 8 x i1> %a, <vscale x 16 x i1> %b) {
+; CHECK-LABEL: @try_combine_svbool_binop_orr(
+; CHECK-NEXT:    [[TMP1:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> [[B:%.*]])
+; CHECK-NEXT:    [[TMP2:%.*]] = call <vscale x 8 x i1> @llvm.aarch64.sve.orr.z.nxv8i1(<vscale x 8 x i1> [[A:%.*]], <vscale x 8 x i1> [[TMP1]], <vscale x 8 x i1> [[TMP1]])
+; CHECK-NEXT:    ret <vscale x 8 x i1> [[TMP2]]
+;
+  %t1 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1> %a)
+  %t2 = tail call <vscale x 16 x i1> @llvm.aarch64.sve.orr.z.nxv16i1(<vscale x 16 x i1> %t1, <vscale x 16 x i1> %b, <vscale x 16 x i1> %b)
+  %t3 = tail call <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1> %t2)
+  ret <vscale x 8 x i1> %t3
+}
+
 declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv8i1(<vscale x 8 x i1>)
 declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv4i1(<vscale x 4 x i1>)
 declare <vscale x 16 x i1> @llvm.aarch64.sve.convert.to.svbool.nxv2i1(<vscale x 2 x i1>)
-declare <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
 declare <vscale x 8 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv8i1(<vscale x 16 x i1>)
 declare <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1>)
 declare <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.and.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.bic.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.eor.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.nand.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.nor.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.orn.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
+declare <vscale x 16 x i1> @llvm.aarch64.sve.orr.z.nxv16i1(<vscale x 16 x i1>, <vscale x 16 x i1>, <vscale x 16 x i1>)
 
 attributes #0 = { "target-features"="+sve" }
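
Note for reviewers: the switch in the first hunk is purely an allow-list. Each newly listed intrinsic is treated exactly like the existing aarch64_sve_and_z case, i.e. as a zeroing SVE predicate binop that the convert.to.svbool/convert.from.svbool combine may look through and rewrite at the narrower predicate type, as the updated tests show. A minimal standalone sketch of that allow-list idea (the helper name is illustrative only, not the function this patch actually modifies):

    #include "llvm/IR/Intrinsics.h"
    #include "llvm/IR/IntrinsicsAArch64.h"
    using namespace llvm;

    // Hypothetical helper, for illustration: returns true for the zeroing SVE
    // predicate binops that the svbool conversion combine accepts after this
    // patch (the same set as the switch cases in the first hunk).
    static bool isNarrowableSVEPredBinOp(Intrinsic::ID IID) {
      switch (IID) {
      case Intrinsic::aarch64_sve_and_z:
      case Intrinsic::aarch64_sve_bic_z:
      case Intrinsic::aarch64_sve_eor_z:
      case Intrinsic::aarch64_sve_nand_z:
      case Intrinsic::aarch64_sve_nor_z:
      case Intrinsic::aarch64_sve_orn_z:
      case Intrinsic::aarch64_sve_orr_z:
        return true;
      default:
        return false;
      }
    }

The per-case break; form used in the patch is behaviourally identical to letting all the case labels fall through to a single break;.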