Index: llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
===================================================================
--- llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
+++ llvm/lib/Target/AArch64/GISel/AArch64LegalizerInfo.cpp
@@ -767,7 +767,9 @@
       .minScalar(0, MinFPScalar);
 
   // TODO: Libcall support for s128.
-  getActionDefinitionsBuilder(G_LROUND).legalFor({{s64, s32}, {s64, s64}});
+  // TODO: s16 should be legal with full FP16 support.
+  getActionDefinitionsBuilder({G_LROUND, G_LLROUND})
+      .legalFor({{s64, s32}, {s64, s64}});
 
   getLegacyLegalizerInfo().computeTables();
   verify(*ST.getInstrInfo());
Index: llvm/test/CodeGen/AArch64/GlobalISel/legalize-llround.mir
===================================================================
--- /dev/null
+++ llvm/test/CodeGen/AArch64/GlobalISel/legalize-llround.mir
@@ -0,0 +1,39 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple=aarch64 -run-pass=legalizer -verify-machineinstrs %s -o - | FileCheck %s
+
+...
+---
+name: s64_s32_legal
+tracksRegLiveness: true
+body: |
+  bb.0:
+    liveins: $s0
+    ; CHECK-LABEL: name: s64_s32_legal
+    ; CHECK: liveins: $s0
+    ; CHECK: %copy:_(s32) = COPY $s0
+    ; CHECK: %llround:_(s64) = G_LLROUND %copy(s32)
+    ; CHECK: %trunc:_(s32) = G_TRUNC %llround(s64)
+    ; CHECK: $w0 = COPY %trunc(s32)
+    ; CHECK: RET_ReallyLR implicit $w0
+    %copy:_(s32) = COPY $s0
+    %llround:_(s64) = G_LLROUND %copy(s32)
+    %trunc:_(s32) = G_TRUNC %llround
+    $w0 = COPY %trunc(s32)
+    RET_ReallyLR implicit $w0
+...
+---
+name: s64_s64_legal
+tracksRegLiveness: true
+body: |
+  bb.0:
+    liveins: $d0
+    ; CHECK-LABEL: name: s64_s64_legal
+    ; CHECK: liveins: $d0
+    ; CHECK: %copy:_(s64) = COPY $d0
+    ; CHECK: %llround:_(s64) = G_LLROUND %copy(s64)
+    ; CHECK: $x0 = COPY %llround(s64)
+    ; CHECK: RET_ReallyLR implicit $x0
+    %copy:_(s64) = COPY $d0
+    %llround:_(s64) = G_LLROUND %copy(s64)
+    $x0 = COPY %llround
+    RET_ReallyLR implicit $x0
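
Not part of the patch: a hedged sketch of how the two TODOs above might eventually
be expressed, assuming LegalizerHelper gains libcall lowering for G_LROUND/G_LLROUND
and instruction selection handles half-precision sources under full FP16. HasFP16
stands in for a subtarget query such as ST.hasFullFP16() and is named here only for
illustration.

  // Sketch only: accept an s16 source when full FP16 is available and send
  // s128 sources to a libcall; both paths need backing support (selection
  // patterns, libcall lowering) that this patch does not add.
  getActionDefinitionsBuilder({G_LROUND, G_LLROUND})
      .legalFor({{s64, s32}, {s64, s64}})
      .legalIf([=](const LegalityQuery &Query) {
        return HasFP16 && Query.Types[1] == s16;
      })
      .libcallFor({{s64, s128}});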