diff --git a/llvm/lib/Target/AArch64/AArch64Combine.td b/llvm/lib/Target/AArch64/AArch64Combine.td
--- a/llvm/lib/Target/AArch64/AArch64Combine.td
+++ b/llvm/lib/Target/AArch64/AArch64Combine.td
@@ -78,7 +78,7 @@
 
 def AArch64PostLegalizerCombinerHelper
     : GICombinerHelper<"AArch64GenPostLegalizerCombinerHelper",
-                       [erase_undef_store, combines_for_extload,
+                       [copy_prop, erase_undef_store, combines_for_extload,
                         sext_trunc_sextload, shuffle_vector_pseudos,
                         hoist_logic_op_with_same_opcode_hands]> {
   let DisableRuleOption = "aarch64postlegalizercombiner-disable-rule";
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/combine-sext-trunc-sextload.mir b/llvm/test/CodeGen/AArch64/GlobalISel/combine-sext-trunc-sextload.mir
--- a/llvm/test/CodeGen/AArch64/GlobalISel/combine-sext-trunc-sextload.mir
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/combine-sext-trunc-sextload.mir
@@ -12,8 +12,7 @@
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
     ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s64) = G_SEXTLOAD [[COPY]](p0) :: (load 2)
     ; CHECK: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[SEXTLOAD]](s64)
-    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[TRUNC]](s32)
-    ; CHECK: $w0 = COPY [[COPY1]](s32)
+    ; CHECK: $w0 = COPY [[TRUNC]](s32)
     %0:_(p0) = COPY $x0
     %1:_(s64) = G_SEXTLOAD %0:_(p0) :: (load 2)
     %2:_(s32) = G_TRUNC %1:_(s64)
@@ -31,9 +30,7 @@
     ; CHECK: liveins: $x0
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
     ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s32) = G_SEXTLOAD [[COPY]](p0) :: (load 2)
-    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[SEXTLOAD]](s32)
-    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
-    ; CHECK: $w0 = COPY [[COPY2]](s32)
+    ; CHECK: $w0 = COPY [[SEXTLOAD]](s32)
     %0:_(p0) = COPY $x0
     %1:_(s32) = G_SEXTLOAD %0:_(p0) :: (load 2)
     %2:_(s32) = COPY %1:_(s32)
@@ -52,8 +49,7 @@
     ; CHECK: liveins: $x0
     ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
     ; CHECK: [[SEXTLOAD:%[0-9]+]]:_(s32) = G_SEXTLOAD [[COPY]](p0) :: (load 2)
-    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY [[SEXTLOAD]](s32)
-    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY1]], 24
+    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[SEXTLOAD]], 24
     ; CHECK: $w0 = COPY [[SEXT_INREG]](s32)
     %0:_(p0) = COPY $x0
     %1:_(s32) = G_SEXTLOAD %0:_(p0) :: (load 2)
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/postlegalizer-combiner-copy-prop.mir b/llvm/test/CodeGen/AArch64/GlobalISel/postlegalizer-combiner-copy-prop.mir
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/postlegalizer-combiner-copy-prop.mir
@@ -0,0 +1,29 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -mtriple aarch64 -run-pass=aarch64-postlegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s
+
+...
+---
+name: postlegalize_copy_prop
+legalized: true
+tracksRegLiveness: true
+body: |
+  bb.1.entry:
+    liveins: $x0, $x1
+
+    ; CHECK-LABEL: name: postlegalize_copy_prop
+    ; CHECK: liveins: $x0, $x1
+    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
+    ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
+    ; CHECK: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[COPY1]]
+    ; CHECK: [[ADD1:%[0-9]+]]:_(s64) = G_ADD [[ADD]], [[ADD]]
+    ; CHECK: $x0 = COPY [[ADD1]](s64)
+    ; CHECK: RET_ReallyLR
+    %0:_(s64) = COPY $x0
+    %1:_(s64) = COPY $x1
+    %2:_(s64) = G_ADD %0, %1
+    %3:_(s64) = COPY %2
+    %4:_(s64) = G_ADD %3, %3
+    $x0 = COPY %4
+    RET_ReallyLR
+
+...