Index: llvm/lib/Target/AMDGPU/SIOptimizeExecMasking.cpp
===================================================================
--- llvm/lib/Target/AMDGPU/SIOptimizeExecMasking.cpp
+++ llvm/lib/Target/AMDGPU/SIOptimizeExecMasking.cpp
@@ -298,9 +298,20 @@
     if (I == E)
       continue;
 
-    // TODO: It's possible to see other terminator copies after the exec copy.
-    Register CopyToExec = isCopyToExec(*I, ST);
-    if (!CopyToExec.isValid())
+    // It's possible to see other terminator copies after the exec copy. This
+    // can happen if control flow pseudos had their outputs used by phis.
+    Register CopyToExec;
+
+    unsigned SearchCount = 0;
+    const unsigned SearchLimit = 5;
+    while (I != E && SearchCount++ < SearchLimit) {
+      CopyToExec = isCopyToExec(*I, ST);
+      if (CopyToExec)
+        break;
+      ++I;
+    }
+
+    if (!CopyToExec)
       continue;
 
     // Scan backwards to find the def.
Index: llvm/test/CodeGen/AMDGPU/optimize-exec-copies-extra-insts-after-copy.mir
===================================================================
--- /dev/null
+++ llvm/test/CodeGen/AMDGPU/optimize-exec-copies-extra-insts-after-copy.mir
@@ -0,0 +1,52 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc -march=amdgcn -mcpu=fiji -verify-machineinstrs -run-pass=si-optimize-exec-masking -o - %s | FileCheck %s
+
+# Make sure we can still optimize writes to exec when there are
+# additional terminators after the exec write. This can happen with
+# phi users of control flow intrinsics.
+ +--- +name: instructions_after_copy_to_exec +tracksRegLiveness: true +body: | + ; CHECK-LABEL: name: instructions_after_copy_to_exec + ; CHECK: bb.0: + ; CHECK: successors: %bb.2(0x40000000), %bb.1(0x40000000) + ; CHECK: liveins: $vgpr0 + ; CHECK: renamable $vgpr1 = V_MOV_B32_e32 0, implicit $exec + ; CHECK: renamable $vcc = V_CMP_EQ_U32_e64 0, killed $vgpr0, implicit $exec + ; CHECK: $sgpr0_sgpr1 = S_AND_SAVEEXEC_B64 $vcc, implicit-def $exec, implicit-def $scc, implicit $exec + ; CHECK: renamable $sgpr0_sgpr1 = S_XOR_B64 $exec, killed renamable $sgpr0_sgpr1, implicit-def dead $scc + ; CHECK: renamable $sgpr0_sgpr1 = COPY killed renamable $sgpr0_sgpr1, implicit $exec + ; CHECK: S_CBRANCH_EXECZ %bb.2, implicit $exec + ; CHECK: bb.1: + ; CHECK: successors: %bb.2(0x80000000) + ; CHECK: liveins: $sgpr0_sgpr1 + ; CHECK: S_NOP 0, implicit $sgpr0_sgpr1 + ; CHECK: bb.2: + ; CHECK: liveins: $sgpr0_sgpr1 + ; CHECK: S_NOP 0, implicit $sgpr0_sgpr1 + bb.0: + liveins: $vgpr0 + + renamable $vgpr1 = V_MOV_B32_e32 0, implicit $exec + renamable $vcc = V_CMP_EQ_U32_e64 0, killed $vgpr0, implicit $exec + renamable $sgpr0_sgpr1 = COPY $exec, implicit-def $exec + renamable $sgpr2_sgpr3 = S_AND_B64 renamable $sgpr0_sgpr1, renamable $vcc, implicit-def dead $scc + renamable $sgpr0_sgpr1 = S_XOR_B64 renamable $sgpr2_sgpr3, killed renamable $sgpr0_sgpr1, implicit-def dead $scc + $exec = S_MOV_B64_term killed renamable $sgpr2_sgpr3 + renamable $sgpr0_sgpr1 = S_MOV_B64_term killed renamable $sgpr0_sgpr1, implicit $exec + S_CBRANCH_EXECZ %bb.2, implicit $exec + + bb.1: + liveins: $sgpr0_sgpr1 + + S_NOP 0, implicit $sgpr0_sgpr1 + + bb.2: + liveins: $sgpr0_sgpr1 + + S_NOP 0, implicit $sgpr0_sgpr1 + +... +