diff --git a/llvm/lib/Target/AArch64/AArch64FastISel.cpp b/llvm/lib/Target/AArch64/AArch64FastISel.cpp
--- a/llvm/lib/Target/AArch64/AArch64FastISel.cpp
+++ b/llvm/lib/Target/AArch64/AArch64FastISel.cpp
@@ -53,6 +53,7 @@
 #include "llvm/IR/Instructions.h"
 #include "llvm/IR/IntrinsicInst.h"
 #include "llvm/IR/Intrinsics.h"
+#include "llvm/IR/IntrinsicsAArch64.h"
 #include "llvm/IR/Operator.h"
 #include "llvm/IR/Type.h"
 #include "llvm/IR/User.h"
@@ -3776,6 +3777,57 @@
     updateValueMap(II, ResultReg1, 2);
     return true;
   }
+  case Intrinsic::aarch64_crc32b:
+  case Intrinsic::aarch64_crc32h:
+  case Intrinsic::aarch64_crc32w:
+  case Intrinsic::aarch64_crc32x:
+  case Intrinsic::aarch64_crc32cb:
+  case Intrinsic::aarch64_crc32ch:
+  case Intrinsic::aarch64_crc32cw:
+  case Intrinsic::aarch64_crc32cx: {
+    if (!Subtarget->hasCRC())
+      return false;
+
+    unsigned Opc;
+    switch (II->getIntrinsicID()) {
+    default:
+      llvm_unreachable("Unexpected intrinsic!");
+    case Intrinsic::aarch64_crc32b:
+      Opc = AArch64::CRC32Brr;
+      break;
+    case Intrinsic::aarch64_crc32h:
+      Opc = AArch64::CRC32Hrr;
+      break;
+    case Intrinsic::aarch64_crc32w:
+      Opc = AArch64::CRC32Wrr;
+      break;
+    case Intrinsic::aarch64_crc32x:
+      Opc = AArch64::CRC32Xrr;
+      break;
+    case Intrinsic::aarch64_crc32cb:
+      Opc = AArch64::CRC32CBrr;
+      break;
+    case Intrinsic::aarch64_crc32ch:
+      Opc = AArch64::CRC32CHrr;
+      break;
+    case Intrinsic::aarch64_crc32cw:
+      Opc = AArch64::CRC32CWrr;
+      break;
+    case Intrinsic::aarch64_crc32cx:
+      Opc = AArch64::CRC32CXrr;
+      break;
+    }
+
+    Register LHSReg = getRegForValue(II->getArgOperand(0));
+    Register RHSReg = getRegForValue(II->getArgOperand(1));
+    if (!LHSReg || !RHSReg)
+      return false;
+
+    Register ResultReg =
+        fastEmitInst_rr(Opc, &AArch64::GPR32RegClass, LHSReg, RHSReg);
+    updateValueMap(II, ResultReg);
+    return true;
+  }
   }
   return false;
 }
diff --git a/llvm/test/CodeGen/AArch64/arm64-fast-isel-crc32.ll b/llvm/test/CodeGen/AArch64/arm64-fast-isel-crc32.ll
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/AArch64/arm64-fast-isel-crc32.ll
@@ -0,0 +1,71 @@
+; RUN: llc -mtriple=arm64-eabi -fast-isel -fast-isel-abort=3 -mattr=+crc -o - %s | FileCheck %s
+; RUN: llc -mtriple=arm64-eabi -fast-isel -fast-isel-abort=3 -mattr=+v8r -o - %s | FileCheck %s
+; RUN: llc -mtriple=arm64-eabi -fast-isel -fast-isel-abort=3 -mcpu=cortex-a53 -mattr=+crc -o - %s | FileCheck %s
+
+; Note: tests are a copy of arm64-crc32.ll
+
+define i32 @test_crc32b(i32 %cur, i32 %next) {
+; CHECK-LABEL: test_crc32b:
+; CHECK: crc32b w0, w0, w1
+  %val = call i32 @llvm.aarch64.crc32b(i32 %cur, i32 %next)
+  ret i32 %val
+}
+
+define i32 @test_crc32h(i32 %cur, i32 %next) {
+; CHECK-LABEL: test_crc32h:
+; CHECK: crc32h w0, w0, w1
+  %val = call i32 @llvm.aarch64.crc32h(i32 %cur, i32 %next)
+  ret i32 %val
+}
+
+define i32 @test_crc32w(i32 %cur, i32 %next) {
+; CHECK-LABEL: test_crc32w:
+; CHECK: crc32w w0, w0, w1
+  %val = call i32 @llvm.aarch64.crc32w(i32 %cur, i32 %next)
+  ret i32 %val
+}
+
+define i32 @test_crc32x(i32 %cur, i64 %next) {
+; CHECK-LABEL: test_crc32x:
+; CHECK: crc32x w0, w0, x1
+  %val = call i32 @llvm.aarch64.crc32x(i32 %cur, i64 %next)
+  ret i32 %val
+}
+
+define i32 @test_crc32cb(i32 %cur, i32 %next) {
+; CHECK-LABEL: test_crc32cb:
+; CHECK: crc32cb w0, w0, w1
+  %val = call i32 @llvm.aarch64.crc32cb(i32 %cur, i32 %next)
+  ret i32 %val
+}
+
+define i32 @test_crc32ch(i32 %cur, i32 %next) {
+; CHECK-LABEL: test_crc32ch:
+; CHECK: crc32ch w0, w0, w1
+  %val = call i32 @llvm.aarch64.crc32ch(i32 %cur, i32 %next)
+  ret i32 %val
+}
+
+define i32 @test_crc32cw(i32 %cur, i32 %next) {
+; CHECK-LABEL: test_crc32cw:
+; CHECK: crc32cw w0, w0, w1
+  %val = call i32 @llvm.aarch64.crc32cw(i32 %cur, i32 %next)
+  ret i32 %val
+}
+
+define i32 @test_crc32cx(i32 %cur, i64 %next) {
+; CHECK-LABEL: test_crc32cx:
+; CHECK: crc32cx w0, w0, x1
+  %val = call i32 @llvm.aarch64.crc32cx(i32 %cur, i64 %next)
+  ret i32 %val
+}
+
+declare i32 @llvm.aarch64.crc32b(i32, i32)
+declare i32 @llvm.aarch64.crc32h(i32, i32)
+declare i32 @llvm.aarch64.crc32w(i32, i32)
+declare i32 @llvm.aarch64.crc32x(i32, i64)
+
+declare i32 @llvm.aarch64.crc32cb(i32, i32)
+declare i32 @llvm.aarch64.crc32ch(i32, i32)
+declare i32 @llvm.aarch64.crc32cw(i32, i32)
+declare i32 @llvm.aarch64.crc32cx(i32, i64)