Index: lib/Target/AMDGPU/SILowerControlFlow.cpp
===================================================================
--- lib/Target/AMDGPU/SILowerControlFlow.cpp
+++ lib/Target/AMDGPU/SILowerControlFlow.cpp
@@ -199,8 +199,8 @@
   MachineInstr *And =
     BuildMI(MBB, I, DL, TII->get(AMDGPU::S_AND_B64), Tmp)
     .addReg(CopyReg)
-    //.addReg(AMDGPU::EXEC)
-    .addReg(Cond.getReg());
+    .add(Cond);
+
   setImpSCCDefDead(*And, true);
 
   MachineInstr *Xor = nullptr;
Index: test/CodeGen/AMDGPU/si-lower-control-flow.mir
===================================================================
--- test/CodeGen/AMDGPU/si-lower-control-flow.mir
+++ test/CodeGen/AMDGPU/si-lower-control-flow.mir
@@ -1,23 +1,53 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
 # RUN: llc -mtriple=amdgcn-amd-amdhsa -run-pass=si-lower-control-flow -verify-machineinstrs %s -o - | FileCheck -check-prefixes=GCN %s
 
 # Check that assert is not triggered
-# GCN-LABEL: name: si-lower-control-flow{{$}}
-# GCN-CHECK: S_LOAD_DWORD_IMM
-
---- |
-
-  define amdgpu_kernel void @si-lower-control-flow() {
-    ret void
-  }
 ...
 ---
 name: si-lower-control-flow
 body: |
   bb.0:
+    ; GCN-LABEL: name: si-lower-control-flow
+    ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr4_sgpr5
+    ; GCN: [[S_LOAD_DWORD_IMM:%[0-9]+]]:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM [[COPY]], 16, 0
+    ; GCN: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0 = S_AND_B32 [[S_LOAD_DWORD_IMM]], 255, implicit-def $scc
+    ; GCN: [[S_AND_B32_1:%[0-9]+]]:sreg_32_xm0 = S_AND_B32 65535, [[S_AND_B32_]], implicit-def $scc
+    ; GCN: S_ENDPGM
     %0:sgpr_64 = COPY $sgpr4_sgpr5
     %1:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0, 16, 0
     %2:sreg_32_xm0 = S_AND_B32 %1, 255, implicit-def $scc
     %3:sreg_32_xm0 = S_AND_B32 65535, %2, implicit-def $scc
     S_ENDPGM
 ...
+
+---
+name: preserve_undef_flag_si_if_src
+tracksRegLiveness: true
+body: |
+  ; GCN-LABEL: name: preserve_undef_flag_si_if_src
+  ; GCN: bb.0:
+  ; GCN:   successors: %bb.1(0x40000000), %bb.2(0x40000000)
+  ; GCN:   [[COPY:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
+  ; GCN:   [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY]], undef %1:sreg_64, implicit-def dead $scc
+  ; GCN:   [[S_XOR_B64_:%[0-9]+]]:sreg_64 = S_XOR_B64 [[S_AND_B64_]], [[COPY]], implicit-def dead $scc
+  ; GCN:   $exec = S_MOV_B64_term killed [[S_AND_B64_]]
+  ; GCN:   SI_MASK_BRANCH %bb.2, implicit $exec
+  ; GCN:   S_BRANCH %bb.1
+  ; GCN: bb.1:
+  ; GCN:   successors: %bb.2(0x80000000)
+  ; GCN: bb.2:
+  ; GCN:   S_ENDPGM
+  bb.0:
+    successors: %bb.1, %bb.2
+
+    %1:sreg_64 = SI_IF undef %0:sreg_64, %bb.2, implicit-def dead $exec, implicit-def dead $scc, implicit $exec
+    S_BRANCH %bb.1
+
+  bb.1:
+    successors: %bb.2
+
+  bb.2:
+    S_ENDPGM
+
+...
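
Reviewer note (sketch, not part of the patch): the behavioral point being tested is
that MachineInstrBuilder::addReg(Cond.getReg()) builds a brand-new register operand
and drops per-operand flags such as 'undef' that were set on the SI_IF source, while
MachineInstrBuilder::add(Cond) copies the whole MachineOperand, flags included. A
minimal sketch of the distinction, assuming the surrounding emitIf() context shown in
the hunk above:

    // Before: rebuilds the operand from the register alone, so an undef SI_IF
    // source loses its flag and the machine verifier can complain about a read
    // of an undefined virtual register.
    //   BuildMI(MBB, I, DL, TII->get(AMDGPU::S_AND_B64), Tmp)
    //       .addReg(CopyReg)
    //       .addReg(Cond.getReg());
    //
    // After: copies Cond verbatim. For the undef flag specifically, a roughly
    // equivalent explicit spelling would be
    //   .addReg(Cond.getReg(), getUndefRegState(Cond.isUndef()));
    MachineInstr *And =
        BuildMI(MBB, I, DL, TII->get(AMDGPU::S_AND_B64), Tmp)
            .addReg(CopyReg)
            .add(Cond);
    setImpSCCDefDead(*And, true); // SCC result of this S_AND_B64 is not consumed here

This lines up with the new preserve_undef_flag_si_if_src test, whose S_AND_B64 check
expects the operand to keep its marker: S_AND_B64 [[COPY]], undef %1:sreg_64,
implicit-def dead $scc.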