Index: llvm/trunk/lib/Target/X86/X86FrameLowering.cpp
===================================================================
--- llvm/trunk/lib/Target/X86/X86FrameLowering.cpp
+++ llvm/trunk/lib/Target/X86/X86FrameLowering.cpp
@@ -2550,6 +2550,18 @@
                      + " required but not provided");
 }
 
+// Return true if there are no non-ehpad successors to MBB and there are no
+// non-meta instructions between MBBI and MBB.end().
+bool blockEndIsUnreachable(const MachineBasicBlock &MBB,
+                           MachineBasicBlock::const_iterator MBBI) {
+  return std::all_of(
+             MBB.succ_begin(), MBB.succ_end(),
+             [](const MachineBasicBlock *Succ) { return Succ->isEHPad(); }) &&
+         std::all_of(MBBI, MBB.end(), [](const MachineInstr &MI) {
+           return MI.isMetaInstruction();
+         });
+}
+
 /// Erlang programs may need a special prologue to handle the stack size they
 /// might need at runtime. That is because Erlang/OTP does not implement a C
 /// stack but uses a custom implementation of hybrid stack/heap architecture.
@@ -2783,7 +2795,7 @@
   unsigned Opcode = I->getOpcode();
   bool isDestroy = Opcode == TII.getCallFrameDestroyOpcode();
   DebugLoc DL = I->getDebugLoc();
-  uint64_t Amount = !reserveCallFrame ? TII.getFrameSize(*I) : 0;
+  uint64_t Amount = TII.getFrameSize(*I);
   uint64_t InternalAmt = (isDestroy || Amount) ? TII.getFrameAdjustment(*I) : 0;
   I = MBB.erase(I);
   auto InsertPos = skipDebugInstructionsForward(I, MBB.end());
@@ -2872,7 +2884,7 @@
     return I;
   }
 
-  if (isDestroy && InternalAmt) {
+  if (isDestroy && InternalAmt && !blockEndIsUnreachable(MBB, I)) {
     // If we are performing frame pointer elimination and if the callee pops
     // something off the stack pointer, add it back. We do this until we have
     // more advanced stack pointer tracking ability.
Index: llvm/trunk/test/CodeGen/X86/noreturn-call-linux.ll
===================================================================
--- llvm/trunk/test/CodeGen/X86/noreturn-call-linux.ll
+++ llvm/trunk/test/CodeGen/X86/noreturn-call-linux.ll
@@ -0,0 +1,59 @@
+; RUN: llc < %s -mtriple=x86_64-linux-gnu | FileCheck %s
+
+; PR43155, we used to emit dead stack adjustments for noreturn calls with stack
+; arguments.
+
+; Original source code:
+; __attribute__((noreturn)) void exit_manyarg(int, int, int, int, int, int, int, int, int, int);
+; struct ByVal {
+;   int vals[10];
+; };
+; struct ByVal getbyval();
+; void make_push_unprofitable(struct ByVal);
+; int foo(int c) {
+;   if (c)
+;     exit_manyarg(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+;   make_push_unprofitable(getbyval());
+;   make_push_unprofitable(getbyval());
+;   make_push_unprofitable(getbyval());
+;   return 0;
+; }
+
+%struct.ByVal = type { [10 x i32] }
+
+define dso_local i32 @foo(i32 %c) {
+entry:
+  %agg.tmp = alloca %struct.ByVal, align 8
+  %agg.tmp1 = alloca %struct.ByVal, align 8
+  %agg.tmp2 = alloca %struct.ByVal, align 8
+  %tobool = icmp eq i32 %c, 0
+  br i1 %tobool, label %if.end, label %if.then
+
+if.then:                                          ; preds = %entry
+  tail call void @exit_manyarg(i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10) #3
+  unreachable
+
+if.end:                                           ; preds = %entry
+  call void @getbyval(%struct.ByVal* nonnull sret %agg.tmp) #4
+  call void @make_push_unprofitable(%struct.ByVal* nonnull byval(%struct.ByVal) align 8 %agg.tmp) #4
+  call void @getbyval(%struct.ByVal* nonnull sret %agg.tmp1) #4
+  call void @make_push_unprofitable(%struct.ByVal* nonnull byval(%struct.ByVal) align 8 %agg.tmp1) #4
+  call void @getbyval(%struct.ByVal* nonnull sret %agg.tmp2) #4
+  call void @make_push_unprofitable(%struct.ByVal* nonnull byval(%struct.ByVal) align 8 %agg.tmp2) #4
+  ret i32 0
+}
+
+; CHECK-LABEL: foo:
+; The main body is not important.
+; CHECK: callq exit_manyarg
+; CHECK-NOT: sub
+; CHECK-NOT: add
+; CHECK: # -- End function
+
+; Function Attrs: noreturn
+declare dso_local void @exit_manyarg(i32, i32, i32, i32, i32, i32, i32, i32, i32, i32) noreturn
+
+declare dso_local void @make_push_unprofitable(%struct.ByVal* byval(%struct.ByVal) align 8)
+
+declare dso_local void @getbyval(%struct.ByVal* sret)
+
Index: llvm/trunk/test/CodeGen/X86/noreturn-call.ll
===================================================================
--- llvm/trunk/test/CodeGen/X86/noreturn-call.ll
+++ llvm/trunk/test/CodeGen/X86/noreturn-call.ll
@@ -46,3 +46,59 @@
 declare void @crash(i8*) noreturn
 declare void @crash2(i8*)
 declare void @g(i8*)
+
+%struct.ByVal = type { [10 x i32] }
+
+define dso_local i32 @pr43155() {
+entry:
+  %agg.tmp = alloca %struct.ByVal, align 4
+  %agg.tmp5 = alloca %struct.ByVal, align 4
+  %agg.tmp6 = alloca %struct.ByVal, align 4
+  %call = tail call i32 @cond()
+  %tobool = icmp eq i32 %call, 0
+  br i1 %tobool, label %if.end, label %if.then
+
+if.then:                                          ; preds = %entry
+  tail call x86_stdcallcc void @stdcall_abort(i32 12, i32 2)
+  unreachable
+
+if.end:                                           ; preds = %entry
+  %call1 = tail call i32 @cond()
+  %tobool2 = icmp eq i32 %call1, 0
+  br i1 %tobool2, label %if.end4, label %if.then3
+
+if.then3:                                         ; preds = %if.end
+  tail call x86_stdcallcc void @stdcall_abort(i32 15, i32 2)
+  unreachable
+
+if.end4:                                          ; preds = %if.end
+  call void @getbyval(%struct.ByVal* nonnull sret %agg.tmp)
+  call void @make_push_unprofitable(%struct.ByVal* nonnull byval(%struct.ByVal) align 4 %agg.tmp)
+  call void @getbyval(%struct.ByVal* nonnull sret %agg.tmp5)
+  call void @make_push_unprofitable(%struct.ByVal* nonnull byval(%struct.ByVal) align 4 %agg.tmp5)
+  call void @getbyval(%struct.ByVal* nonnull sret %agg.tmp6)
+  call void @make_push_unprofitable(%struct.ByVal* nonnull byval(%struct.ByVal) align 4 %agg.tmp6)
+  ret i32 0
+}
+
+; Check that there are no stack adjustments after stdcall_abort.
+; CHECK-LABEL: pr43155:
+; The main function body contents are not important.
+; CHECK: retl
+; CHECK: # %if.then
+; CHECK: calll _stdcall_abort@8
+; CHECK-NOT: sub
+; CHECK-NOT: add
+; CHECK: # %if.then3
+; CHECK: calll _stdcall_abort@8
+; CHECK-NOT: sub
+; CHECK-NOT: add
+; CHECK: # -- End function
+
+declare dso_local i32 @cond()
+
+declare dso_local x86_stdcallcc void @stdcall_abort(i32, i32) noreturn
+
+declare dso_local void @make_push_unprofitable(%struct.ByVal* byval(%struct.ByVal) align 4)
+
+declare dso_local void @getbyval(%struct.ByVal* sret)