Index: llvm/trunk/include/llvm/CodeGen/AsmPrinter.h =================================================================== --- llvm/trunk/include/llvm/CodeGen/AsmPrinter.h +++ llvm/trunk/include/llvm/CodeGen/AsmPrinter.h @@ -631,6 +631,11 @@ /// inline asm. void EmitInlineAsm(const MachineInstr *MI) const; + /// Add inline assembly info to the diagnostics machinery, so we can + /// emit file and position info. Returns SrcMgr memory buffer position. + unsigned addInlineAsmDiagBuffer(StringRef AsmStr, + const MDNode *LocMDNode) const; + //===------------------------------------------------------------------===// // Internal Implementation Details //===------------------------------------------------------------------===// Index: llvm/trunk/include/llvm/CodeGen/TargetRegisterInfo.h =================================================================== --- llvm/trunk/include/llvm/CodeGen/TargetRegisterInfo.h +++ llvm/trunk/include/llvm/CodeGen/TargetRegisterInfo.h @@ -510,6 +510,13 @@ /// markSuperRegs() and checkAllSuperRegsMarked() in this case. virtual BitVector getReservedRegs(const MachineFunction &MF) const = 0; + /// Returns false if we can't guarantee that Physreg, specified as an IR asm + /// clobber constraint, will be preserved across the statement. + virtual bool isAsmClobberable(const MachineFunction &MF, + unsigned PhysReg) const { + return true; + } + /// Returns true if PhysReg is unallocatable and constant throughout the /// function. Used by MachineRegisterInfo::isConstantPhysReg(). 
virtual bool isConstantPhysReg(unsigned PhysReg) const { return false; } Index: llvm/trunk/lib/CodeGen/AsmPrinter/AsmPrinterInlineAsm.cpp =================================================================== --- llvm/trunk/lib/CodeGen/AsmPrinter/AsmPrinterInlineAsm.cpp +++ llvm/trunk/lib/CodeGen/AsmPrinter/AsmPrinterInlineAsm.cpp @@ -71,6 +71,42 @@ DiagInfo->DiagHandler(Diag, DiagInfo->DiagContext, LocCookie); } +unsigned AsmPrinter::addInlineAsmDiagBuffer(StringRef AsmStr, + const MDNode *LocMDNode) const { + if (!DiagInfo) { + DiagInfo = make_unique<SrcMgrDiagInfo>(); + + MCContext &Context = MMI->getContext(); + Context.setInlineSourceManager(&DiagInfo->SrcMgr); + + LLVMContext &LLVMCtx = MMI->getModule()->getContext(); + if (LLVMCtx.getInlineAsmDiagnosticHandler()) { + DiagInfo->DiagHandler = LLVMCtx.getInlineAsmDiagnosticHandler(); + DiagInfo->DiagContext = LLVMCtx.getInlineAsmDiagnosticContext(); + DiagInfo->SrcMgr.setDiagHandler(srcMgrDiagHandler, DiagInfo.get()); + } + } + + SourceMgr &SrcMgr = DiagInfo->SrcMgr; + + std::unique_ptr<MemoryBuffer> Buffer; + // The inline asm source manager will outlive AsmStr, so make a copy of the + // string for SourceMgr to own. + Buffer = MemoryBuffer::getMemBufferCopy(AsmStr, "<inline asm>"); + + // Tell SrcMgr about this buffer, it takes ownership of the buffer. + unsigned BufNum = SrcMgr.AddNewSourceBuffer(std::move(Buffer), SMLoc()); + + // Store LocMDNode in DiagInfo, using BufNum as an identifier. + if (LocMDNode) { + DiagInfo->LocInfos.resize(BufNum); + DiagInfo->LocInfos[BufNum - 1] = LocMDNode; + } + + return BufNum; +} + + /// EmitInlineAsm - Emit a blob of inline asm to the output streamer. 
void AsmPrinter::EmitInlineAsm(StringRef Str, const MCSubtargetInfo &STI, const MCTargetOptions &MCOptions, @@ -98,39 +134,11 @@ return; } - if (!DiagInfo) { - DiagInfo = make_unique<SrcMgrDiagInfo>(); + unsigned BufNum = addInlineAsmDiagBuffer(Str, LocMDNode); + DiagInfo->SrcMgr.setIncludeDirs(MCOptions.IASSearchPaths); - - MCContext &Context = MMI->getContext(); - Context.setInlineSourceManager(&DiagInfo->SrcMgr); - - LLVMContext &LLVMCtx = MMI->getModule()->getContext(); - if (LLVMCtx.getInlineAsmDiagnosticHandler()) { - DiagInfo->DiagHandler = LLVMCtx.getInlineAsmDiagnosticHandler(); - DiagInfo->DiagContext = LLVMCtx.getInlineAsmDiagnosticContext(); - DiagInfo->SrcMgr.setDiagHandler(srcMgrDiagHandler, DiagInfo.get()); - } - } - - SourceMgr &SrcMgr = DiagInfo->SrcMgr; - SrcMgr.setIncludeDirs(MCOptions.IASSearchPaths); - - std::unique_ptr<MemoryBuffer> Buffer; - // The inline asm source manager will outlive Str, so make a copy of the - // string for SourceMgr to own. - Buffer = MemoryBuffer::getMemBufferCopy(Str, "<inline asm>"); - - // Tell SrcMgr about this buffer, it takes ownership of the buffer. - unsigned BufNum = SrcMgr.AddNewSourceBuffer(std::move(Buffer), SMLoc()); - - // Store LocMDNode in DiagInfo, using BufNum as an identifier. - if (LocMDNode) { - DiagInfo->LocInfos.resize(BufNum); - DiagInfo->LocInfos[BufNum-1] = LocMDNode; - } - - std::unique_ptr<MCAsmParser> Parser( - createMCAsmParser(SrcMgr, OutContext, *OutStreamer, *MAI, BufNum)); + std::unique_ptr<MCAsmParser> Parser(createMCAsmParser( + DiagInfo->SrcMgr, OutContext, *OutStreamer, *MAI, BufNum)); // Do not use assembler-level information for parsing inline assembly. OutStreamer->setUseAssemblerInfoForParsing(false); @@ -519,6 +527,44 @@ MCOptions.SanitizeAddress = MF->getFunction().hasFnAttribute(Attribute::SanitizeAddress); + // Emit warnings if we use reserved registers on the clobber list, as + // that might give surprising results. + std::vector<std::string> RestrRegs; + // Start with the first operand descriptor, and iterate over them. 
+ for (unsigned I = InlineAsm::MIOp_FirstOperand, NumOps = MI->getNumOperands(); + I < NumOps; ++I) { + const MachineOperand &MO = MI->getOperand(I); + if (MO.isImm()) { + unsigned Flags = MO.getImm(); + const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo(); + if (InlineAsm::getKind(Flags) == InlineAsm::Kind_Clobber && + !TRI->isAsmClobberable(*MF, MI->getOperand(I + 1).getReg())) { + RestrRegs.push_back(TRI->getName(MI->getOperand(I + 1).getReg())); + } + // Skip to one before the next operand descriptor, if it exists. + I += InlineAsm::getNumOperandRegisters(Flags); + } + } + + if (!RestrRegs.empty()) { + unsigned BufNum = addInlineAsmDiagBuffer(OS.str(), LocMD); + auto &SrcMgr = DiagInfo->SrcMgr; + SMLoc Loc = SMLoc::getFromPointer( + SrcMgr.getMemoryBuffer(BufNum)->getBuffer().begin()); + + std::string Msg = "inline asm clobber list contains reserved registers: "; + for (auto I = RestrRegs.begin(), E = RestrRegs.end(); I != E; I++) { + if(I != RestrRegs.begin()) + Msg += ", "; + Msg += *I; + } + std::string Note = "Reserved registers on the clobber list may not be " + "preserved across the asm statement, and clobbering them may " + "lead to undefined behaviour."; + SrcMgr.PrintMessage(Loc, SourceMgr::DK_Warning, Msg); + SrcMgr.PrintMessage(Loc, SourceMgr::DK_Note, Note); + } + EmitInlineAsm(OS.str(), getSubtargetInfo(), MCOptions, LocMD, MI->getInlineAsmDialect()); Index: llvm/trunk/lib/Target/AArch64/AArch64RegisterInfo.h =================================================================== --- llvm/trunk/lib/Target/AArch64/AArch64RegisterInfo.h +++ llvm/trunk/lib/Target/AArch64/AArch64RegisterInfo.h @@ -69,6 +69,8 @@ const uint32_t *getWindowsStackProbePreservedMask() const; BitVector getReservedRegs(const MachineFunction &MF) const override; + bool isAsmClobberable(const MachineFunction &MF, + unsigned PhysReg) const override; bool isConstantPhysReg(unsigned PhysReg) const override; const TargetRegisterClass * getPointerRegClass(const 
MachineFunction &MF, Index: llvm/trunk/lib/Target/AArch64/AArch64RegisterInfo.cpp =================================================================== --- llvm/trunk/lib/Target/AArch64/AArch64RegisterInfo.cpp +++ llvm/trunk/lib/Target/AArch64/AArch64RegisterInfo.cpp @@ -189,6 +189,11 @@ return false; } +bool AArch64RegisterInfo::isAsmClobberable(const MachineFunction &MF, + unsigned PhysReg) const { + return !isReservedReg(MF, PhysReg); +} + bool AArch64RegisterInfo::isConstantPhysReg(unsigned PhysReg) const { return PhysReg == AArch64::WZR || PhysReg == AArch64::XZR; } Index: llvm/trunk/lib/Target/ARM/ARMBaseRegisterInfo.h =================================================================== --- llvm/trunk/lib/Target/ARM/ARMBaseRegisterInfo.h +++ llvm/trunk/lib/Target/ARM/ARMBaseRegisterInfo.h @@ -131,6 +131,8 @@ CallingConv::ID) const; BitVector getReservedRegs(const MachineFunction &MF) const override; + bool isAsmClobberable(const MachineFunction &MF, + unsigned PhysReg) const override; const TargetRegisterClass * getPointerRegClass(const MachineFunction &MF, Index: llvm/trunk/lib/Target/ARM/ARMBaseRegisterInfo.cpp =================================================================== --- llvm/trunk/lib/Target/ARM/ARMBaseRegisterInfo.cpp +++ llvm/trunk/lib/Target/ARM/ARMBaseRegisterInfo.cpp @@ -209,6 +209,11 @@ return Reserved; } +bool ARMBaseRegisterInfo:: +isAsmClobberable(const MachineFunction &MF, unsigned PhysReg) const { + return !getReservedRegs(MF).test(PhysReg); +} + const TargetRegisterClass * ARMBaseRegisterInfo::getLargestLegalSuperClass(const TargetRegisterClass *RC, const MachineFunction &) const { Index: llvm/trunk/test/CodeGen/AArch64/inline-asm-clobber.ll =================================================================== --- llvm/trunk/test/CodeGen/AArch64/inline-asm-clobber.ll +++ llvm/trunk/test/CodeGen/AArch64/inline-asm-clobber.ll @@ -0,0 +1,8 @@ +; RUN: llc <%s -mtriple=aarch64-none-eabi 2>&1 | FileCheck %s + +; CHECK: warning: inline asm 
clobber list contains reserved registers: SP + +define void @foo() nounwind { + call void asm sideeffect "mov x7, #1", "~{x7},~{sp}"() + ret void +} Index: llvm/trunk/test/CodeGen/ARM/inline-asm-clobber.ll =================================================================== --- llvm/trunk/test/CodeGen/ARM/inline-asm-clobber.ll +++ llvm/trunk/test/CodeGen/ARM/inline-asm-clobber.ll @@ -0,0 +1,27 @@ +; RUN: llc <%s -mtriple=arm-none-eabi 2>&1 | FileCheck %s -check-prefix=CHECK + +; RUN: llc <%s -mtriple=arm-none-eabi -relocation-model=rwpi 2>&1 \ +; RUN: | FileCheck %s -check-prefix=RWPI + +; RUN: llc <%s -mtriple=arm-none-eabi --disable-fp-elim 2>&1 \ +; RUN: | FileCheck %s -check-prefix=NO_FP_ELIM + +; CHECK: warning: inline asm clobber list contains reserved registers: SP, PC +; CHECK: warning: inline asm clobber list contains reserved registers: R11 +; RWPI: warning: inline asm clobber list contains reserved registers: R9, SP, PC +; RWPI: warning: inline asm clobber list contains reserved registers: R11 +; NO_FP_ELIM: warning: inline asm clobber list contains reserved registers: R11, SP, PC +; NO_FP_ELIM: warning: inline asm clobber list contains reserved registers: R11 + +define void @foo() nounwind { + call void asm sideeffect "mov r7, #1", + "~{r9},~{r11},~{r12},~{lr},~{sp},~{pc},~{r10}"() + ret void +} + +define i32 @bar(i32 %i) { + %vla = alloca i32, i32 %i, align 4 + tail call void asm sideeffect "mov r7, #1", "~{r11}"() + %1 = load volatile i32, i32* %vla, align 4 + ret i32 %1 +}