diff --git a/llvm/include/llvm/CodeGen/TargetRegisterInfo.h b/llvm/include/llvm/CodeGen/TargetRegisterInfo.h
--- a/llvm/include/llvm/CodeGen/TargetRegisterInfo.h
+++ b/llvm/include/llvm/CodeGen/TargetRegisterInfo.h
@@ -1094,6 +1094,13 @@
   inline MCRegister getSubReg(MCRegister Reg, unsigned Idx) const {
     return static_cast<const MCRegisterInfo *>(this)->getSubReg(Reg, Idx);
   }
+
+  /// Some targets have non-allocatable registers that aren't technically part
+  /// of the explicit callee saved register list, but should be handled as such
+  /// in certain cases.
+  virtual bool isNonallocatableRegisterCalleeSave(MCRegister Reg) const {
+    return false;
+  }
 };
 
 //===----------------------------------------------------------------------===//
diff --git a/llvm/lib/CodeGen/ShrinkWrap.cpp b/llvm/lib/CodeGen/ShrinkWrap.cpp
--- a/llvm/lib/CodeGen/ShrinkWrap.cpp
+++ b/llvm/lib/CodeGen/ShrinkWrap.cpp
@@ -288,8 +288,15 @@
       // separately. An SP mentioned by a call instruction, we can ignore,
       // though, as it's harmless and we do not want to effectively disable tail
       // calls by forcing the restore point to post-dominate them.
-      UseOrDefCSR = (!MI.isCall() && PhysReg == SP) ||
-                    RCI.getLastCalleeSavedAlias(PhysReg);
+      const MachineFunction *MF = MI.getParent()->getParent();
+      const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
+      UseOrDefCSR |= !MI.isCall() && PhysReg == SP;
+      UseOrDefCSR |= RCI.getLastCalleeSavedAlias(PhysReg);
+      // PPC's LR is technically not considered callee saved, since it is also
+      // non-allocatable, but if we early clobber the LR we certainly do not
+      // want to shrink wrap across such a MachineInstr.
+      UseOrDefCSR |= MO.isEarlyClobber() &&
+                     TRI->isNonallocatableRegisterCalleeSave(PhysReg);
     } else if (MO.isRegMask()) {
       // Check if this regmask clobbers any of the CSRs.
       for (unsigned Reg : getCurrentCSRs(RS)) {
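[Note] The ShrinkWrap change is easier to read outside of diff context. Below is a minimal, self-contained sketch of the per-operand decision the loop above now makes. It is not LLVM code: OperandInfo, isCalleeSavedAlias, isNonallocatableCalleeSave, and useOrDefCSR are hypothetical stand-ins, where isCalleeSavedAlias models RCI.getLastCalleeSavedAlias and isNonallocatableCalleeSave models the new TRI->isNonallocatableRegisterCalleeSave hook.

// Minimal standalone sketch (not LLVM code) of the per-operand decision in
// ShrinkWrap after this patch. All names and register numbers are made up.
#include <cstdint>

struct OperandInfo {
  uint32_t PhysReg;    // physical register used or defined by the operand
  bool IsCall;         // the owning MachineInstr is a call
  bool IsEarlyClobber; // the operand carries the earlyclobber flag
};

// Stand-in for RCI.getLastCalleeSavedAlias(PhysReg): is the register an
// alias of an explicitly callee-saved register? (hypothetical numbering)
static bool isCalleeSavedAlias(uint32_t Reg) { return Reg >= 14; }

// Stand-in for the new target hook; a PPC-like target would return true for
// its link register here.
static bool isNonallocatableCalleeSave(uint32_t Reg) { return Reg == 8; }

static bool useOrDefCSR(const OperandInfo &MO, uint32_t SP) {
  bool Result = false;
  // A non-call use or def of SP touches the stack frame.
  Result |= !MO.IsCall && MO.PhysReg == SP;
  // Any alias of an explicitly callee-saved register counts.
  Result |= isCalleeSavedAlias(MO.PhysReg);
  // New in this patch: an early-clobbered register that the target treats as
  // callee saved despite being non-allocatable (PPC's LR) also counts, so
  // the save point cannot sink past an inline asm that clobbers it.
  Result |= MO.IsEarlyClobber && isNonallocatableCalleeSave(MO.PhysReg);
  return Result;
}

int main() {
  // An inline-asm operand that early-clobbers the stand-in LR (register 8)
  // must be reported as using/defining a CSR.
  return useOrDefCSR({/*PhysReg=*/8, /*IsCall=*/false,
                      /*IsEarlyClobber=*/true}, /*SP=*/1) ? 0 : 1;
}

Note the switch from a single assignment to the |= form: it lets each condition be stated, and commented, on its own, and the new early-clobber test composes with the existing two without nesting.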
diff --git a/llvm/lib/Target/PowerPC/PPCRegisterInfo.h b/llvm/lib/Target/PowerPC/PPCRegisterInfo.h
--- a/llvm/lib/Target/PowerPC/PPCRegisterInfo.h
+++ b/llvm/lib/Target/PowerPC/PPCRegisterInfo.h
@@ -185,6 +185,10 @@
     return RegName;
   }
+
+  bool isNonallocatableRegisterCalleeSave(MCRegister Reg) const override {
+    return Reg == PPC::LR || Reg == PPC::LR8;
+  }
 };
 
 } // end namespace llvm
diff --git a/llvm/test/CodeGen/PowerPC/ppc64-inlineasm-clobber.ll b/llvm/test/CodeGen/PowerPC/ppc64-inlineasm-clobber.ll
--- a/llvm/test/CodeGen/PowerPC/ppc64-inlineasm-clobber.ll
+++ b/llvm/test/CodeGen/PowerPC/ppc64-inlineasm-clobber.ll
@@ -75,43 +75,41 @@
 define dso_local signext i32 @ClobberLR_BR(i32 signext %in) #0 {
 ; PPC64LE-LABEL: ClobberLR_BR:
 ; PPC64LE:       # %bb.0: # %entry
+; PPC64LE-NEXT:    mflr r0
+; PPC64LE-NEXT:    std r0, 16(r1)
+; PPC64LE-NEXT:    stdu r1, -32(r1)
 ; PPC64LE-NEXT:    #APP
 ; PPC64LE-NEXT:    nop
 ; PPC64LE-NEXT:    #NO_APP
-; PPC64LE-NEXT:  # %bb.1: # %return
+; PPC64LE-NEXT:  .LBB3_1: # %return
 ; PPC64LE-NEXT:    extsw r3, r3
-; PPC64LE-NEXT:    blr
-; PPC64LE-NEXT:  .Ltmp0: # Block address taken
-; PPC64LE-NEXT:  .LBB3_2: # %return_early
-; PPC64LE-NEXT:    mflr r0
-; PPC64LE-NEXT:    std r0, 16(r1)
-; PPC64LE-NEXT:    stdu r1, -32(r1)
-; PPC64LE-NEXT:    li r3, 0
 ; PPC64LE-NEXT:    addi r1, r1, 32
 ; PPC64LE-NEXT:    ld r0, 16(r1)
 ; PPC64LE-NEXT:    mtlr r0
-; PPC64LE-NEXT:    extsw r3, r3
 ; PPC64LE-NEXT:    blr
+; PPC64LE-NEXT:  .Ltmp0: # Block address taken
+; PPC64LE-NEXT:  .LBB3_2: # %return_early
+; PPC64LE-NEXT:    li r3, 0
+; PPC64LE-NEXT:    b .LBB3_1
 ;
 ; PPC64BE-LABEL: ClobberLR_BR:
 ; PPC64BE:       # %bb.0: # %entry
+; PPC64BE-NEXT:    mflr r0
+; PPC64BE-NEXT:    std r0, 16(r1)
+; PPC64BE-NEXT:    stdu r1, -48(r1)
 ; PPC64BE-NEXT:    #APP
 ; PPC64BE-NEXT:    nop
 ; PPC64BE-NEXT:    #NO_APP
-; PPC64BE-NEXT:  # %bb.1: # %return
+; PPC64BE-NEXT:  .LBB3_1: # %return
 ; PPC64BE-NEXT:    extsw r3, r3
-; PPC64BE-NEXT:    blr
-; PPC64BE-NEXT:  .Ltmp0: # Block address taken
-; PPC64BE-NEXT:  .LBB3_2: # %return_early
-; PPC64BE-NEXT:    mflr r0
-; PPC64BE-NEXT:    std r0, 16(r1)
-; PPC64BE-NEXT:    stdu r1, -48(r1)
-; PPC64BE-NEXT:    li r3, 0
 ; PPC64BE-NEXT:    addi r1, r1, 48
 ; PPC64BE-NEXT:    ld r0, 16(r1)
 ; PPC64BE-NEXT:    mtlr r0
-; PPC64BE-NEXT:    extsw r3, r3
 ; PPC64BE-NEXT:    blr
+; PPC64BE-NEXT:  .Ltmp0: # Block address taken
+; PPC64BE-NEXT:  .LBB3_2: # %return_early
+; PPC64BE-NEXT:    li r3, 0
+; PPC64BE-NEXT:    b .LBB3_1
 entry:
   callbr void asm sideeffect "nop", "X,~{lr}"(i8* blockaddress(@ClobberLR_BR, %return_early))
           to label %return [label %return_early]
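[Note] For reference, the callbr in this test is the kind of IR clang emits for asm goto with an LR clobber. A rough source-level reproduction, written for illustration under that assumption and not part of the patch, would be:

// Illustrative PPC64-targeted source only, not part of the patch. asm goto
// with an "lr" clobber lowers to a callbr that clobbers the link register,
// which is exactly the case the ShrinkWrap change must not sink a save past.
int ClobberLR_BR(int in) {
  asm goto("nop" : /* no outputs */ : /* no inputs */ : "lr" : return_early);
  return in;
return_early:
  return 0;
}

Before this change, shrink wrapping sank the LR save into the %return_early block, after the inline asm had already clobbered LR, so the saved value was garbage. With the hook, the mflr/std/stdu prologue stays in the entry block, ahead of the asm, and both paths share the epilogue at .LBB3_1.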