diff --git a/llvm/lib/Target/PowerPC/PPCRegisterInfo.h b/llvm/lib/Target/PowerPC/PPCRegisterInfo.h
--- a/llvm/lib/Target/PowerPC/PPCRegisterInfo.h
+++ b/llvm/lib/Target/PowerPC/PPCRegisterInfo.h
@@ -91,6 +91,8 @@
   void adjustStackMapLiveOutMask(uint32_t *Mask) const override;
 
   BitVector getReservedRegs(const MachineFunction &MF) const override;
+  bool isAsmClobberable(const MachineFunction &MF,
+                        MCRegister PhysReg) const override;
   bool isCallerPreservedPhysReg(MCRegister PhysReg,
                                 const MachineFunction &MF) const override;
 
diff --git a/llvm/lib/Target/PowerPC/PPCRegisterInfo.cpp b/llvm/lib/Target/PowerPC/PPCRegisterInfo.cpp
--- a/llvm/lib/Target/PowerPC/PPCRegisterInfo.cpp
+++ b/llvm/lib/Target/PowerPC/PPCRegisterInfo.cpp
@@ -390,6 +390,18 @@
   return Reserved;
 }
 
+bool PPCRegisterInfo::isAsmClobberable(const MachineFunction &MF,
+                                       MCRegister PhysReg) const {
+  // We cannot use getReservedRegs() to find the registers that are not asm
+  // clobberable, because some reserved registers can still be clobbered by
+  // inline asm: when LR is clobbered, for example, it is saved and restored
+  // around the asm statement. Instead, we hardcode the registers that are
+  // not asm-clobberable in this function.
+
+  // The stack pointer (X1/R1) is not clobberable by inline asm.
+  return PhysReg != (TM.isPPC64() ? PPC::X1 : PPC::R1);
+}
+
 bool PPCRegisterInfo::requiresFrameIndexScavenging(const MachineFunction &MF) const {
   const PPCSubtarget &Subtarget = MF.getSubtarget<PPCSubtarget>();
   const PPCInstrInfo *InstrInfo = Subtarget.getInstrInfo();
diff --git a/llvm/test/CodeGen/PowerPC/inline-asm-clobber-warning-64.ll b/llvm/test/CodeGen/PowerPC/inline-asm-clobber-warning-64.ll
new file
--- /dev/null
+++ b/llvm/test/CodeGen/PowerPC/inline-asm-clobber-warning-64.ll
@@ -0,0 +1,10 @@
+; RUN: llc < %s -verify-machineinstrs -mtriple=powerpc64-unknown-unknown \
+; RUN:   -mcpu=pwr7 2>&1 | FileCheck %s
+
+; CHECK: warning: inline asm clobber list contains reserved registers: X1
+; CHECK-NEXT: note: Reserved registers on the clobber list may not be preserved across the asm statement, and clobbering them may lead to undefined behaviour.
+define void @test_r1_clobber() {
+entry:
+  call void asm sideeffect "nop", "~{x1}"()
+  ret void
+}
diff --git a/llvm/test/CodeGen/PowerPC/inline-asm-clobber-warning.ll b/llvm/test/CodeGen/PowerPC/inline-asm-clobber-warning.ll
new file
--- /dev/null
+++ b/llvm/test/CodeGen/PowerPC/inline-asm-clobber-warning.ll
@@ -0,0 +1,10 @@
+; RUN: llc < %s -verify-machineinstrs -mtriple=powerpc-unknown-unknown \
+; RUN:   -mcpu=pwr7 2>&1 | FileCheck %s
+
+; CHECK: warning: inline asm clobber list contains reserved registers: R1
+; CHECK-NEXT: note: Reserved registers on the clobber list may not be preserved across the asm statement, and clobbering them may lead to undefined behaviour.
+define void @test_r1_clobber() {
+entry:
+  call void asm sideeffect "nop", "~{r1}"()
+  ret void
+}
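
Note (illustrative, not part of the patch): a minimal C reproducer, assuming clang targeting PowerPC, that lowers to the same IR as the tests above and should now surface the new warning:

    /* repro.c - hypothetical example; "r1" is the stack pointer on PowerPC,
     * which this patch reports as not asm-clobberable. */
    void test_r1_clobber(void) {
      /* Listing r1 as a clobber should now emit:
       *   warning: inline asm clobber list contains reserved registers: R1 */
      __asm__ volatile("nop" : /* no outputs */ : /* no inputs */ : "r1");
    }

Assumed invocation: clang --target=powerpc-unknown-unknown -S repro.c (use a powerpc64 triple to see the X1 spelling instead).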