diff --git a/llvm/include/llvm/CodeGen/GlobalISel/CombinerHelper.h b/llvm/include/llvm/CodeGen/GlobalISel/CombinerHelper.h --- a/llvm/include/llvm/CodeGen/GlobalISel/CombinerHelper.h +++ b/llvm/include/llvm/CodeGen/GlobalISel/CombinerHelper.h @@ -107,6 +107,9 @@ bool matchCombineIndexedLoadStore(MachineInstr &MI, IndexedLoadStoreMatchInfo &MatchInfo); void applyCombineIndexedLoadStore(MachineInstr &MI, IndexedLoadStoreMatchInfo &MatchInfo); + bool matchSextTruncSextLoad(MachineInstr &MI, Register &LoadReg); + bool applySextTruncSextLoad(MachineInstr &MI, Register &LoadReg); + bool matchElideBrByInvertingCond(MachineInstr &MI); void applyElideBrByInvertingCond(MachineInstr &MI); bool tryElideBrByInvertingCond(MachineInstr &MI); diff --git a/llvm/include/llvm/Target/GlobalISel/Combine.td b/llvm/include/llvm/Target/GlobalISel/Combine.td --- a/llvm/include/llvm/Target/GlobalISel/Combine.td +++ b/llvm/include/llvm/Target/GlobalISel/Combine.td @@ -114,6 +114,13 @@ (apply [{ Helper.applyCombineExtendingLoads(*${root}, ${matchinfo}); }])>; def combines_for_extload: GICombineGroup<[extending_loads]>; +def sext_trunc_sextload_matchinfo: GIDefMatchData<"Register">; +def sext_trunc_sextload : GICombineRule< + (defs root:$d, sext_trunc_sextload_matchinfo:$matchinfo), + (match (wip_match_opcode G_SEXT_INREG):$d, + [{ return Helper.matchSextTruncSextLoad(*${d}, ${matchinfo}); }]), + (apply [{ Helper.applySextTruncSextLoad(*${d}, ${matchinfo}); }])>; + def combine_indexed_load_store : GICombineRule< (defs root:$root, indexed_load_store_matchdata:$matchinfo), (match (wip_match_opcode G_LOAD, G_SEXTLOAD, G_ZEXTLOAD, G_STORE):$root, diff --git a/llvm/lib/CodeGen/GlobalISel/CombinerHelper.cpp b/llvm/lib/CodeGen/GlobalISel/CombinerHelper.cpp --- a/llvm/lib/CodeGen/GlobalISel/CombinerHelper.cpp +++ b/llvm/lib/CodeGen/GlobalISel/CombinerHelper.cpp @@ -576,6 +576,41 @@ return isPredecessor(DefMI, UseMI); } +bool CombinerHelper::matchSextTruncSextLoad(MachineInstr &MI, + Register &LoadReg) 
{ + assert(MI.getOpcode() == TargetOpcode::G_SEXT_INREG); + Register SrcReg = MI.getOperand(1).getReg(); + Register LoadUser = SrcReg; + + Register TruncSrc; + if (mi_match(SrcReg, MRI, m_GTrunc(m_Reg(TruncSrc)))) + LoadUser = TruncSrc; + + uint64_t SizeInBits = MI.getOperand(2).getImm(); + // If the source is a G_SEXTLOAD from the same bit width, then we don't + // need any extend at all, just a truncate. + if (auto *LoadMI = getOpcodeDef(TargetOpcode::G_SEXTLOAD, LoadUser, MRI)) { + const auto &MMO = **LoadMI->memoperands_begin(); + // If truncating more than the original extended value, abort. + if (TruncSrc && MRI.getType(TruncSrc).getSizeInBits() < MMO.getSizeInBits()) + return false; + if (MMO.getSizeInBits() == SizeInBits) { + LoadReg = LoadMI->getOperand(0).getReg(); + return true; + } + } + return false; +} + +bool CombinerHelper::applySextTruncSextLoad(MachineInstr &MI, + Register &LoadReg) { + assert(MI.getOpcode() == TargetOpcode::G_SEXT_INREG); + MachineIRBuilder MIB(MI); + MIB.buildAnyExtOrTrunc(MI.getOperand(0).getReg(), LoadReg); + MI.eraseFromParent(); + return true; +} + bool CombinerHelper::findPostIndexCandidate(MachineInstr &MI, Register &Addr, Register &Base, Register &Offset) { auto &MF = *MI.getParent()->getParent(); diff --git a/llvm/lib/Target/AArch64/AArch64Combine.td b/llvm/lib/Target/AArch64/AArch64Combine.td --- a/llvm/lib/Target/AArch64/AArch64Combine.td +++ b/llvm/lib/Target/AArch64/AArch64Combine.td @@ -77,6 +77,6 @@ def AArch64PostLegalizerCombinerHelper : GICombinerHelper<"AArch64GenPostLegalizerCombinerHelper", [erase_undef_store, combines_for_extload, - shuffle_vector_pseudos]> { + sext_trunc_sextload, shuffle_vector_pseudos]> { let DisableRuleOption = "aarch64postlegalizercombiner-disable-rule"; } diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/combine-sext-trunc-sextload.mir b/llvm/test/CodeGen/AArch64/GlobalISel/combine-sext-trunc-sextload.mir new file mode 100644 --- /dev/null +++ 
b/llvm/test/CodeGen/AArch64/GlobalISel/combine-sext-trunc-sextload.mir @@ -0,0 +1,61 @@ +# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py +# RUN: llc -mtriple aarch64 -run-pass=aarch64-postlegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s +--- +name: test_combine_sext_trunc_of_sextload +legalized: true +tracksRegLiveness: true +body: | + bb.0.entry: + liveins: $x0 + ; CHECK-LABEL: name: test_combine_sext_trunc_of_sextload + ; CHECK: liveins: $x0 + ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0 + ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 2) + ; CHECK: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SEXTLOAD]](s64) + ; CHECK: $w0 = COPY [[TRUNC]](s32) + %0:_(p0) = COPY $x0 + %1:_(s64) = G_SEXTLOAD %0:_(p0) :: (load 2) + %2:_(s32) = G_TRUNC %1:_(s64) + %3:_(s32) = G_SEXT_INREG %2:_(s32), 16 + $w0 = COPY %3(s32) +... +--- +name: test_combine_sext_of_sextload +legalized: true +tracksRegLiveness: true +body: | + bb.0.entry: + liveins: $x0 + ; CHECK-LABEL: name: test_combine_sext_of_sextload + ; CHECK: liveins: $x0 + ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0 + ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s32) = G_SEXTLOAD [[COPY]](p0) :: (load 2) + ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[SEXTLOAD]](s32) + ; CHECK: $w0 = COPY [[COPY1]](s32) + %0:_(p0) = COPY $x0 + %1:_(s32) = G_SEXTLOAD %0:_(p0) :: (load 2) + %2:_(s32) = COPY %1:_(s32) + %3:_(s32) = G_SEXT_INREG %2:_(s32), 16 + $w0 = COPY %3(s32) +... +--- +name: test_combine_sext_of_sextload_not_matching +legalized: true +tracksRegLiveness: true +body: | + bb.0.entry: + liveins: $x0 + ; Here we're trying to extend from a larger value than was extended in the load. 
+ ; CHECK-LABEL: name: test_combine_sext_of_sextload_not_matching + ; CHECK: liveins: $x0 + ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0 + ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s32) = G_SEXTLOAD [[COPY]](p0) :: (load 2) + ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[SEXTLOAD]](s32) + ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 24 + ; CHECK: $w0 = COPY [[SEXT_INREG]](s32) + %0:_(p0) = COPY $x0 + %1:_(s32) = G_SEXTLOAD %0:_(p0) :: (load 2) + %2:_(s32) = COPY %1:_(s32) + %3:_(s32) = G_SEXT_INREG %2:_(s32), 24 + $w0 = COPY %3(s32) +...