diff --git a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntb.c b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntb.c --- a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntb.c +++ b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntb.c @@ -7,8 +7,9 @@ uint64_t test_svcntb() { // CHECK-LABEL: test_svcntb - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 4 + // CHECK: ret i64 %[[RET]] return svcntb(); } @@ -23,72 +24,63 @@ uint64_t test_svcntb_pat_1() { // CHECK-LABEL: test_svcntb_pat_1 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 1) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 1 return svcntb_pat(SV_VL1); } uint64_t test_svcntb_pat_2() { // CHECK-LABEL: test_svcntb_pat_2 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 2) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 2 return svcntb_pat(SV_VL2); } uint64_t test_svcntb_pat_3() { // CHECK-LABEL: test_svcntb_pat_3 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 3) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 3 return svcntb_pat(SV_VL3); } uint64_t test_svcntb_pat_4() { // CHECK-LABEL: test_svcntb_pat_4 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 4) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 4 return svcntb_pat(SV_VL4); } uint64_t test_svcntb_pat_5() { // CHECK-LABEL: test_svcntb_pat_5 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 5) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 5 return svcntb_pat(SV_VL5); } uint64_t test_svcntb_pat_6() { // CHECK-LABEL: test_svcntb_pat_6 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 6) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 6 return svcntb_pat(SV_VL6); } uint64_t test_svcntb_pat_7() { // CHECK-LABEL: 
test_svcntb_pat_7 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 7) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 7 return svcntb_pat(SV_VL7); } uint64_t test_svcntb_pat_8() { // CHECK-LABEL: test_svcntb_pat_8 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 8) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 8 return svcntb_pat(SV_VL8); } uint64_t test_svcntb_pat_9() { // CHECK-LABEL: test_svcntb_pat_9 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 9) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 16 return svcntb_pat(SV_VL16); } @@ -143,7 +135,8 @@ uint64_t test_svcntb_pat_16() { // CHECK-LABEL: test_svcntb_pat_16 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntb(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 4 + // CHECK: ret i64 %[[RET]] return svcntb_pat(SV_ALL); } diff --git a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntd.c b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntd.c --- a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntd.c +++ b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntd.c @@ -7,8 +7,9 @@ uint64_t test_svcntd() { // CHECK-LABEL: test_svcntd - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntd(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 1 + // CHECK: ret i64 %[[RET]] return svcntd(); } @@ -23,16 +24,14 @@ uint64_t test_svcntd_pat_1() { // CHECK-LABEL: test_svcntd_pat_1 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntd(i32 1) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 1 return svcntd_pat(SV_VL1); } uint64_t test_svcntd_pat_2() { // CHECK-LABEL: test_svcntd_pat_2 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntd(i32 2) - // CHECK: ret i64 
%[[INTRINSIC]] + // CHECK: ret i64 2 return svcntd_pat(SV_VL2); } @@ -143,7 +142,8 @@ uint64_t test_svcntd_pat_16() { // CHECK-LABEL: test_svcntd_pat_16 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntd(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 1 + // CHECK: ret i64 %[[RET]] return svcntd_pat(SV_ALL); } diff --git a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cnth.c b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cnth.c --- a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cnth.c +++ b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cnth.c @@ -7,8 +7,9 @@ uint64_t test_svcnth() { // CHECK-LABEL: test_svcnth - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 3 + // CHECK: ret i64 %[[RET]] return svcnth(); } @@ -23,64 +24,56 @@ uint64_t test_svcnth_pat_1() { // CHECK-LABEL: test_svcnth_pat_1 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 1) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 1 return svcnth_pat(SV_VL1); } uint64_t test_svcnth_pat_2() { // CHECK-LABEL: test_svcnth_pat_2 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 2) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 2 return svcnth_pat(SV_VL2); } uint64_t test_svcnth_pat_3() { // CHECK-LABEL: test_svcnth_pat_3 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 3) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 3 return svcnth_pat(SV_VL3); } uint64_t test_svcnth_pat_4() { // CHECK-LABEL: test_svcnth_pat_4 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 4) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 4 return svcnth_pat(SV_VL4); } uint64_t test_svcnth_pat_5() { // CHECK-LABEL: 
test_svcnth_pat_5 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 5) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 5 return svcnth_pat(SV_VL5); } uint64_t test_svcnth_pat_6() { // CHECK-LABEL: test_svcnth_pat_6 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 6) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 6 return svcnth_pat(SV_VL6); } uint64_t test_svcnth_pat_7() { // CHECK-LABEL: test_svcnth_pat_7 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 7) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 7 return svcnth_pat(SV_VL7); } uint64_t test_svcnth_pat_8() { // CHECK-LABEL: test_svcnth_pat_8 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 8) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 8 return svcnth_pat(SV_VL8); } @@ -143,7 +136,8 @@ uint64_t test_svcnth_pat_16() { // CHECK-LABEL: test_svcnth_pat_16 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cnth(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 3 + // CHECK: ret i64 %[[RET]] return svcnth_pat(SV_ALL); } diff --git a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntw.c b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntw.c --- a/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntw.c +++ b/clang/test/CodeGen/aarch64-sve-intrinsics/acle_sve_cntw.c @@ -7,8 +7,9 @@ uint64_t test_svcntw() { // CHECK-LABEL: test_svcntw - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntw(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 2 + // CHECK: ret i64 %[[RET]] return svcntw(); } @@ -23,32 +24,28 @@ uint64_t test_svcntw_pat_1() { // CHECK-LABEL: test_svcntw_pat_1 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntw(i32 1) - // CHECK: ret i64 
%[[INTRINSIC]] + // CHECK: ret i64 1 return svcntw_pat(SV_VL1); } uint64_t test_svcntw_pat_2() { // CHECK-LABEL: test_svcntw_pat_2 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntw(i32 2) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 2 return svcntw_pat(SV_VL2); } uint64_t test_svcntw_pat_3() { // CHECK-LABEL: test_svcntw_pat_3 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntw(i32 3) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 3 return svcntw_pat(SV_VL3); } uint64_t test_svcntw_pat_4() { // CHECK-LABEL: test_svcntw_pat_4 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntw(i32 4) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: ret i64 4 return svcntw_pat(SV_VL4); } @@ -143,7 +140,8 @@ uint64_t test_svcntw_pat_16() { // CHECK-LABEL: test_svcntw_pat_16 - // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.aarch64.sve.cntw(i32 31) - // CHECK: ret i64 %[[INTRINSIC]] + // CHECK: %[[INTRINSIC:.*]] = call i64 @llvm.vscale.i64() + // CHECK-NEXT: %[[RET:.*]] = shl i64 %[[INTRINSIC]], 2 + // CHECK: ret i64 %[[RET]] return svcntw_pat(SV_ALL); } diff --git a/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp b/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp --- a/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp +++ b/llvm/lib/Target/AArch64/AArch64TargetTransformInfo.cpp @@ -642,6 +642,30 @@ return IC.replaceInstUsesWith(II, RDFFR); } +static Optional<Instruction *> +instCombineSVECntElts(InstCombiner &IC, IntrinsicInst &II, unsigned NumElts) { + const auto Pattern = cast<ConstantInt>(II.getArgOperand(0))->getZExtValue(); + + if (Pattern == AArch64SVEPredPattern::all) { + LLVMContext &Ctx = II.getContext(); + IRBuilder<> Builder(Ctx); + Builder.SetInsertPoint(&II); + + Constant *StepVal = ConstantInt::get(II.getType(), NumElts); + auto *VScale = Builder.CreateVScale(StepVal); + VScale->takeName(&II); + return IC.replaceInstUsesWith(II, VScale); + } else if (Pattern >= AArch64SVEPredPattern::vl1 && + Pattern <= AArch64SVEPredPattern::vl8 
&& NumElts >= Pattern) { + Constant *StepVal = ConstantInt::get(II.getType(), Pattern); + return IC.replaceInstUsesWith(II, StepVal); + } else if (Pattern == AArch64SVEPredPattern::vl16 && NumElts == 16) { + Constant *StepVal = ConstantInt::get(II.getType(), NumElts); + return IC.replaceInstUsesWith(II, StepVal); + } + return None; +} + Optional<Instruction *> AArch64TTIImpl::instCombineIntrinsic(InstCombiner &IC, IntrinsicInst &II) const { @@ -661,6 +685,14 @@ case Intrinsic::aarch64_sve_lasta: case Intrinsic::aarch64_sve_lastb: return instCombineSVELast(IC, II); + case Intrinsic::aarch64_sve_cntd: + return instCombineSVECntElts(IC, II, 2); + case Intrinsic::aarch64_sve_cntw: + return instCombineSVECntElts(IC, II, 4); + case Intrinsic::aarch64_sve_cnth: + return instCombineSVECntElts(IC, II, 8); + case Intrinsic::aarch64_sve_cntb: + return instCombineSVECntElts(IC, II, 16); } return None; diff --git a/llvm/test/Transforms/InstCombine/AArch64/sve-intrinsic-opts-counting-elems.ll b/llvm/test/Transforms/InstCombine/AArch64/sve-intrinsic-opts-counting-elems.ll new file mode 100644 --- /dev/null +++ b/llvm/test/Transforms/InstCombine/AArch64/sve-intrinsic-opts-counting-elems.ll @@ -0,0 +1,247 @@ +; NOTE: Assertions have been autogenerated by utils/update_test_checks.py +; RUN: opt -S -instcombine < %s | FileCheck %s + +target triple = "aarch64-unknown-linux-gnu" + +; +; CNTB +; + +define i64 @cntb_vl1() { +; CHECK-LABEL: @cntb_vl1( +; CHECK-NEXT: ret i64 1 +; + %out = call i64 @llvm.aarch64.sve.cntb(i32 1) + ret i64 %out +} + +define i64 @cntb_vl2() { +; CHECK-LABEL: @cntb_vl2( +; CHECK-NEXT: ret i64 2 +; + %out = call i64 @llvm.aarch64.sve.cntb(i32 2) + ret i64 %out +} + +define i64 @cntb_vl4() { +; CHECK-LABEL: @cntb_vl4( +; CHECK-NEXT: ret i64 4 +; + %out = call i64 @llvm.aarch64.sve.cntb(i32 4) + ret i64 %out +} + +define i64 @cntb_mul3() { +; CHECK-LABEL: @cntb_mul3( +; CHECK-NEXT: ret i64 24 +; + %cnt = call i64 @llvm.aarch64.sve.cntb(i32 8) + %out = mul i64 %cnt, 3 + ret i64 
%out +} + +define i64 @cntb_mul4() { +; CHECK-LABEL: @cntb_mul4( +; CHECK-NEXT: ret i64 64 +; + %cnt = call i64 @llvm.aarch64.sve.cntb(i32 9) + %out = mul i64 %cnt, 4 + ret i64 %out +} + +define i64 @cntb_all() { +; CHECK-LABEL: @cntb_all( +; CHECK-NEXT: [[TMP1:%.*]] = call i64 @llvm.vscale.i64() +; CHECK-NEXT: [[OUT:%.*]] = shl i64 [[TMP1]], 4 +; CHECK-NEXT: ret i64 [[OUT]] +; + %out = call i64 @llvm.aarch64.sve.cntb(i32 31) + ret i64 %out +} + +; +; CNTH +; + +define i64 @cnth_vl1() { +; CHECK-LABEL: @cnth_vl1( +; CHECK-NEXT: ret i64 1 +; + %out = call i64 @llvm.aarch64.sve.cnth(i32 1) + ret i64 %out +} + +define i64 @cnth_vl2() { +; CHECK-LABEL: @cnth_vl2( +; CHECK-NEXT: ret i64 2 +; + %out = call i64 @llvm.aarch64.sve.cnth(i32 2) + ret i64 %out +} + +define i64 @cnth_vl4() { +; CHECK-LABEL: @cnth_vl4( +; CHECK-NEXT: ret i64 4 +; + %out = call i64 @llvm.aarch64.sve.cnth(i32 4) + ret i64 %out +} + +define i64 @cnth_mul3() { +; CHECK-LABEL: @cnth_mul3( +; CHECK-NEXT: ret i64 24 +; + %cnt = call i64 @llvm.aarch64.sve.cnth(i32 8) + %out = mul i64 %cnt, 3 + ret i64 %out +} + +define i64 @cnth_mul4() { +; CHECK-LABEL: @cnth_mul4( +; CHECK-NEXT: [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cnth(i32 9) +; CHECK-NEXT: [[OUT:%.*]] = shl i64 [[CNT]], 2 +; CHECK-NEXT: ret i64 [[OUT]] +; + %cnt = call i64 @llvm.aarch64.sve.cnth(i32 9) + %out = mul i64 %cnt, 4 + ret i64 %out +} + +define i64 @cnth_all() { +; CHECK-LABEL: @cnth_all( +; CHECK-NEXT: [[TMP1:%.*]] = call i64 @llvm.vscale.i64() +; CHECK-NEXT: [[OUT:%.*]] = shl i64 [[TMP1]], 3 +; CHECK-NEXT: ret i64 [[OUT]] +; + %out = call i64 @llvm.aarch64.sve.cnth(i32 31) + ret i64 %out +} + +; +; CNTW +; + +define i64 @cntw_vl1() { +; CHECK-LABEL: @cntw_vl1( +; CHECK-NEXT: ret i64 1 +; + %out = call i64 @llvm.aarch64.sve.cntw(i32 1) + ret i64 %out +} + +define i64 @cntw_vl2() { +; CHECK-LABEL: @cntw_vl2( +; CHECK-NEXT: ret i64 2 +; + %out = call i64 @llvm.aarch64.sve.cntw(i32 2) + ret i64 %out +} + +define i64 @cntw_vl4() { +; 
CHECK-LABEL: @cntw_vl4( +; CHECK-NEXT: ret i64 4 +; + %out = call i64 @llvm.aarch64.sve.cntw(i32 4) + ret i64 %out +} + +define i64 @cntw_mul3() { +; CHECK-LABEL: @cntw_mul3( +; CHECK-NEXT: [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntw(i32 8) +; CHECK-NEXT: [[OUT:%.*]] = mul i64 [[CNT]], 3 +; CHECK-NEXT: ret i64 [[OUT]] +; + %cnt = call i64 @llvm.aarch64.sve.cntw(i32 8) + %out = mul i64 %cnt, 3 + ret i64 %out +} + +define i64 @cntw_mul4() { +; CHECK-LABEL: @cntw_mul4( +; CHECK-NEXT: [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntw(i32 9) +; CHECK-NEXT: [[OUT:%.*]] = shl i64 [[CNT]], 2 +; CHECK-NEXT: ret i64 [[OUT]] +; + %cnt = call i64 @llvm.aarch64.sve.cntw(i32 9) + %out = mul i64 %cnt, 4 + ret i64 %out +} + +define i64 @cntw_all() { +; CHECK-LABEL: @cntw_all( +; CHECK-NEXT: [[TMP1:%.*]] = call i64 @llvm.vscale.i64() +; CHECK-NEXT: [[OUT:%.*]] = shl i64 [[TMP1]], 2 +; CHECK-NEXT: ret i64 [[OUT]] +; + %out = call i64 @llvm.aarch64.sve.cntw(i32 31) + ret i64 %out +} + + +; +; CNTD +; + +define i64 @cntd_vl1() { +; CHECK-LABEL: @cntd_vl1( +; CHECK-NEXT: ret i64 1 +; + %out = call i64 @llvm.aarch64.sve.cntd(i32 1) + ret i64 %out +} + +define i64 @cntd_vl2() { +; CHECK-LABEL: @cntd_vl2( +; CHECK-NEXT: ret i64 2 +; + %out = call i64 @llvm.aarch64.sve.cntd(i32 2) + ret i64 %out +} + +define i64 @cntd_vl4() { +; CHECK-LABEL: @cntd_vl4( +; CHECK-NEXT: [[OUT:%.*]] = call i64 @llvm.aarch64.sve.cntd(i32 4) +; CHECK-NEXT: ret i64 [[OUT]] +; + %out = call i64 @llvm.aarch64.sve.cntd(i32 4) + ret i64 %out +} + +define i64 @cntd_mul3() { +; CHECK-LABEL: @cntd_mul3( +; CHECK-NEXT: [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntd(i32 8) +; CHECK-NEXT: [[OUT:%.*]] = mul i64 [[CNT]], 3 +; CHECK-NEXT: ret i64 [[OUT]] +; + %cnt = call i64 @llvm.aarch64.sve.cntd(i32 8) + %out = mul i64 %cnt, 3 + ret i64 %out +} + +define i64 @cntd_mul4() { +; CHECK-LABEL: @cntd_mul4( +; CHECK-NEXT: [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntd(i32 9) +; CHECK-NEXT: [[OUT:%.*]] = shl i64 [[CNT]], 2 +; 
CHECK-NEXT: ret i64 [[OUT]] +; + %cnt = call i64 @llvm.aarch64.sve.cntd(i32 9) + %out = mul i64 %cnt, 4 + ret i64 %out +} + +define i64 @cntd_all() { +; CHECK-LABEL: @cntd_all( +; CHECK-NEXT: [[TMP1:%.*]] = call i64 @llvm.vscale.i64() +; CHECK-NEXT: [[OUT:%.*]] = shl i64 [[TMP1]], 1 +; CHECK-NEXT: ret i64 [[OUT]] +; + %out = call i64 @llvm.aarch64.sve.cntd(i32 31) + ret i64 %out +} + + +declare i64 @llvm.aarch64.sve.cntb(i32 %pattern) +declare i64 @llvm.aarch64.sve.cnth(i32 %pattern) +declare i64 @llvm.aarch64.sve.cntw(i32 %pattern) +declare i64 @llvm.aarch64.sve.cntd(i32 %pattern) +