diff --git a/llvm/lib/Target/RISCV/RISCVFrameLowering.cpp b/llvm/lib/Target/RISCV/RISCVFrameLowering.cpp --- a/llvm/lib/Target/RISCV/RISCVFrameLowering.cpp +++ b/llvm/lib/Target/RISCV/RISCVFrameLowering.cpp @@ -541,6 +541,7 @@ const RISCVRegisterInfo *RI = STI.getRegisterInfo(); MachineFrameInfo &MFI = MF.getFrameInfo(); auto *RVFI = MF.getInfo<RISCVMachineFunctionInfo>(); + const RISCVInstrInfo *TII = STI.getInstrInfo(); Register FPReg = getFPReg(STI); Register SPReg = getSPReg(STI); @@ -606,6 +607,50 @@ adjustReg(MBB, LastFrameDestroy, DL, SPReg, SPReg, SecondSPAdjustAmount, MachineInstr::FrameDestroy); + + // Emit ".cfi_def_cfa_offset FirstSPAdjustAmount" if using an sp-based CFA + if (!hasFP(MF)) { + unsigned CFIIndex = MF.addFrameInst( + MCCFIInstruction::cfiDefCfaOffset(nullptr, FirstSPAdjustAmount)); + BuildMI(MBB, LastFrameDestroy, DL, + TII->get(TargetOpcode::CFI_INSTRUCTION)) + .addCFIIndex(CFIIndex); + } + } + + if (hasFP(MF)) { + // To find the instruction restoring FP from stack. + for (auto &I = LastFrameDestroy; I != MBBI; ++I) { + if (I->mayLoad() && I->getOperand(0).isReg()) { + Register DestReg = I->getOperand(0).getReg(); + if (DestReg == FPReg) { + // If there is frame pointer, after restoring $fp registers, we + // need adjust CFA back to the correct sp-based offset. + // Emit ".cfi_def_cfa $sp, CFAOffset" + uint64_t CFAOffset = + FirstSPAdjustAmount + ? FirstSPAdjustAmount + RVFI->getVarArgsSaveSize() + : FPOffset; + unsigned CFIIndex = MF.addFrameInst(MCCFIInstruction::cfiDefCfa( + nullptr, RI->getDwarfRegNum(SPReg, true), CFAOffset)); + BuildMI(MBB, std::next(I), DL, + TII->get(TargetOpcode::CFI_INSTRUCTION)) + .addCFIIndex(CFIIndex); + break; + } + } + } + } + + // Add CFI directives for callee-saved registers. + // Iterate over list of callee-saved registers and emit .cfi_restore + // directives. 
+ for (const auto &Entry : CSI) { + Register Reg = Entry.getReg(); + unsigned CFIIndex = MF.addFrameInst(MCCFIInstruction::createRestore( + nullptr, RI->getDwarfRegNum(Reg, true))); + BuildMI(MBB, MBBI, DL, TII->get(TargetOpcode::CFI_INSTRUCTION)) + .addCFIIndex(CFIIndex); } if (FirstSPAdjustAmount) @@ -616,6 +661,13 @@ // Emit epilogue for shadow call stack. emitSCSEpilogue(MF, MBB, MBBI, DL); + + // After restoring $sp, we need to adjust CFA to $(sp + 0) + // Emit ".cfi_def_cfa_offset 0" + unsigned CFIIndex = + MF.addFrameInst(MCCFIInstruction::cfiDefCfaOffset(nullptr, 0)); + BuildMI(MBB, MBBI, DL, TII->get(TargetOpcode::CFI_INSTRUCTION)) + .addCFIIndex(CFIIndex); } StackOffset diff --git a/llvm/test/CodeGen/RISCV/addimm-mulimm.ll b/llvm/test/CodeGen/RISCV/addimm-mulimm.ll --- a/llvm/test/CodeGen/RISCV/addimm-mulimm.ll +++ b/llvm/test/CodeGen/RISCV/addimm-mulimm.ll @@ -13,6 +13,7 @@ ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 ; RV32IMB-NEXT: addi a0, a0, 1073 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_accept_a1: @@ -20,6 +21,7 @@ ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 ; RV64IMB-NEXT: addiw a0, a0, 1073 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 37 %tmp1 = mul i32 %tmp0, 29 @@ -32,6 +34,7 @@ ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 ; RV32IMB-NEXT: addi a0, a0, 1073 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_accept_a2: @@ -39,6 +42,7 @@ ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 ; RV64IMB-NEXT: addiw a0, a0, 1073 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 37 %tmp1 = mul i32 %tmp0, 29 @@ -56,6 +60,7 @@ ; RV32IMB-NEXT: addi a0, a2, 1073 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_accept_a3: 
@@ -63,6 +68,7 @@ ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mul a0, a0, a1 ; RV64IMB-NEXT: addi a0, a0, 1073 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i64 %x, 37 %tmp1 = mul i64 %tmp0, 29 @@ -77,6 +83,7 @@ ; RV32IMB-NEXT: lui a1, 50 ; RV32IMB-NEXT: addi a1, a1, 1119 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_accept_b1: @@ -86,6 +93,7 @@ ; RV64IMB-NEXT: lui a1, 50 ; RV64IMB-NEXT: addiw a1, a1, 1119 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 8953 %tmp1 = mul i32 %tmp0, 23 @@ -100,6 +108,7 @@ ; RV32IMB-NEXT: lui a1, 50 ; RV32IMB-NEXT: addi a1, a1, 1119 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_accept_b2: @@ -109,6 +118,7 @@ ; RV64IMB-NEXT: lui a1, 50 ; RV64IMB-NEXT: addiw a1, a1, 1119 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 8953 %tmp1 = mul i32 %tmp0, 23 @@ -128,6 +138,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_accept_b3: @@ -137,6 +148,7 @@ ; RV64IMB-NEXT: lui a1, 50 ; RV64IMB-NEXT: addiw a1, a1, 1119 ; RV64IMB-NEXT: add a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i64 %x, 8953 %tmp1 = mul i64 %tmp0, 23 @@ -149,6 +161,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1971 ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_a1: @@ -156,6 +169,7 @@ ; RV64IMB-NEXT: addiw a0, a0, 1971 ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 1971 %tmp1 = mul i32 %tmp0, 29 
@@ -168,6 +182,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1971 ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_a2: @@ -175,6 +190,7 @@ ; RV64IMB-NEXT: addiw a0, a0, 1971 ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 1971 %tmp1 = mul i32 %tmp0, 29 @@ -194,6 +210,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_a3: @@ -201,6 +218,7 @@ ; RV64IMB-NEXT: addi a0, a0, 1971 ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mul a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i64 %x, 1971 %tmp1 = mul i64 %tmp0, 29 @@ -213,6 +231,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1000 ; RV32IMB-NEXT: sh3add a1, a0, a0 ; RV32IMB-NEXT: sh3add a0, a1, a0 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_c1: @@ -221,6 +240,7 @@ ; RV64IMB-NEXT: sh3add a1, a0, a0 ; RV64IMB-NEXT: sh3add a0, a1, a0 ; RV64IMB-NEXT: sext.w a0, a0 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 1000 %tmp1 = mul i32 %tmp0, 73 @@ -233,6 +253,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1000 ; RV32IMB-NEXT: sh3add a1, a0, a0 ; RV32IMB-NEXT: sh3add a0, a1, a0 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_c2: @@ -241,6 +262,7 @@ ; RV64IMB-NEXT: sh3add a1, a0, a0 ; RV64IMB-NEXT: sh3add a0, a1, a0 ; RV64IMB-NEXT: sext.w a0, a0 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 1000 %tmp1 = mul i32 %tmp0, 73 @@ -260,6 +282,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; 
RV64IMB-LABEL: add_mul_combine_reject_c3: @@ -267,6 +290,7 @@ ; RV64IMB-NEXT: addi a0, a0, 1000 ; RV64IMB-NEXT: sh3add a1, a0, a0 ; RV64IMB-NEXT: sh3add a0, a1, a0 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i64 %x, 1000 %tmp1 = mul i64 %tmp0, 73 @@ -279,6 +303,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1000 ; RV32IMB-NEXT: sh1add a0, a0, a0 ; RV32IMB-NEXT: slli a0, a0, 6 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_d1: @@ -286,6 +311,7 @@ ; RV64IMB-NEXT: addi a0, a0, 1000 ; RV64IMB-NEXT: sh1add a0, a0, a0 ; RV64IMB-NEXT: slliw a0, a0, 6 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 1000 %tmp1 = mul i32 %tmp0, 192 @@ -298,6 +324,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1000 ; RV32IMB-NEXT: sh1add a0, a0, a0 ; RV32IMB-NEXT: slli a0, a0, 6 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_d2: @@ -305,6 +332,7 @@ ; RV64IMB-NEXT: addi a0, a0, 1000 ; RV64IMB-NEXT: sh1add a0, a0, a0 ; RV64IMB-NEXT: slliw a0, a0, 6 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i32 %x, 1000 %tmp1 = mul i32 %tmp0, 192 @@ -326,6 +354,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_d3: @@ -333,6 +362,7 @@ ; RV64IMB-NEXT: addi a0, a0, 1000 ; RV64IMB-NEXT: sh1add a0, a0, a0 ; RV64IMB-NEXT: slli a0, a0, 6 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = add i64 %x, 1000 %tmp1 = mul i64 %tmp0, 192 @@ -345,6 +375,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1971 ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_e1: @@ -352,6 +383,7 @@ ; RV64IMB-NEXT: addiw a0, a0, 1971 ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 +; RV64IMB-NEXT: 
.cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 29 %tmp1 = add i32 %tmp0, 57159 @@ -364,6 +396,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1971 ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_e2: @@ -371,6 +404,7 @@ ; RV64IMB-NEXT: addiw a0, a0, 1971 ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 29 %tmp1 = add i32 %tmp0, 57159 @@ -390,6 +424,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_e3: @@ -397,6 +432,7 @@ ; RV64IMB-NEXT: addi a0, a0, 1971 ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mul a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, 29 %tmp1 = add i64 %tmp0, 57159 @@ -410,6 +446,7 @@ ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 ; RV32IMB-NEXT: addi a0, a0, 11 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_f1: @@ -418,6 +455,7 @@ ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 ; RV64IMB-NEXT: addiw a0, a0, 11 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 29 %tmp1 = add i32 %tmp0, 57199 @@ -431,6 +469,7 @@ ; RV32IMB-NEXT: addi a1, zero, 29 ; RV32IMB-NEXT: mul a0, a0, a1 ; RV32IMB-NEXT: addi a0, a0, 11 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_f2: @@ -439,6 +478,7 @@ ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mulw a0, a0, a1 ; RV64IMB-NEXT: addiw a0, a0, 11 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 29 %tmp1 = add i32 %tmp0, 57199 @@ -458,6 +498,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; 
RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_f3: @@ -466,6 +507,7 @@ ; RV64IMB-NEXT: addi a1, zero, 29 ; RV64IMB-NEXT: mul a0, a0, a1 ; RV64IMB-NEXT: addi a0, a0, 11 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, 29 %tmp1 = add i64 %tmp0, 57199 @@ -479,6 +521,7 @@ ; RV32IMB-NEXT: sh3add a1, a0, a0 ; RV32IMB-NEXT: sh3add a0, a1, a0 ; RV32IMB-NEXT: addi a0, a0, 10 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_g1: @@ -487,6 +530,7 @@ ; RV64IMB-NEXT: sh3add a1, a0, a0 ; RV64IMB-NEXT: sh3add a0, a1, a0 ; RV64IMB-NEXT: addiw a0, a0, 10 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 73 %tmp1 = add i32 %tmp0, 7310 @@ -500,6 +544,7 @@ ; RV32IMB-NEXT: sh3add a1, a0, a0 ; RV32IMB-NEXT: sh3add a0, a1, a0 ; RV32IMB-NEXT: addi a0, a0, 10 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_g2: @@ -508,6 +553,7 @@ ; RV64IMB-NEXT: sh3add a1, a0, a0 ; RV64IMB-NEXT: sh3add a0, a1, a0 ; RV64IMB-NEXT: addiw a0, a0, 10 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 73 %tmp1 = add i32 %tmp0, 7310 @@ -527,6 +573,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_reject_g3: @@ -535,6 +582,7 @@ ; RV64IMB-NEXT: sh3add a1, a0, a0 ; RV64IMB-NEXT: sh3add a0, a1, a0 ; RV64IMB-NEXT: addi a0, a0, 10 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, 73 %tmp1 = add i64 %tmp0, 7310 @@ -555,6 +603,7 @@ ; RV32IMB-NEXT: addi a0, a0, 1024 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: add_mul_combine_infinite_loop: @@ -563,6 +612,7 @@ ; RV64IMB-NEXT: lui a1, 1 ; 
RV64IMB-NEXT: addiw a1, a1, -2048 ; RV64IMB-NEXT: sh3add a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, 24 %tmp1 = add i64 %tmp0, 2048 @@ -578,6 +628,7 @@ ; RV32IMB-NEXT: lui a1, 2 ; RV32IMB-NEXT: addi a1, a1, 798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mul3000_add8990_a: @@ -588,6 +639,7 @@ ; RV64IMB-NEXT: lui a1, 2 ; RV64IMB-NEXT: addiw a1, a1, 798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 3000 %tmp1 = add i32 %tmp0, 8990 @@ -603,6 +655,7 @@ ; RV32IMB-NEXT: lui a1, 2 ; RV32IMB-NEXT: addi a1, a1, 798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mul3000_add8990_b: @@ -613,6 +666,7 @@ ; RV64IMB-NEXT: lui a1, 2 ; RV64IMB-NEXT: addiw a1, a1, 798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 3000 %tmp1 = add i32 %tmp0, 8990 @@ -633,6 +687,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mul3000_add8990_c: @@ -643,6 +698,7 @@ ; RV64IMB-NEXT: lui a1, 2 ; RV64IMB-NEXT: addiw a1, a1, 798 ; RV64IMB-NEXT: add a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, 3000 %tmp1 = add i64 %tmp0, 8990 @@ -658,6 +714,7 @@ ; RV32IMB-NEXT: lui a1, 1048574 ; RV32IMB-NEXT: addi a1, a1, -798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mul3000_sub8990_a: @@ -668,6 +725,7 @@ ; RV64IMB-NEXT: lui a1, 1048574 ; RV64IMB-NEXT: addiw a1, a1, -798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 3000 %tmp1 = add i32 %tmp0, -8990 @@ -683,6 +741,7 @@ ; RV32IMB-NEXT: lui a1, 1048574 ; RV32IMB-NEXT: addi a1, 
a1, -798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mul3000_sub8990_b: @@ -693,6 +752,7 @@ ; RV64IMB-NEXT: lui a1, 1048574 ; RV64IMB-NEXT: addiw a1, a1, -798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, 3000 %tmp1 = add i32 %tmp0, -8990 @@ -714,6 +774,7 @@ ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 ; RV32IMB-NEXT: addi a1, a1, -1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mul3000_sub8990_c: @@ -724,6 +785,7 @@ ; RV64IMB-NEXT: lui a1, 1048574 ; RV64IMB-NEXT: addiw a1, a1, -798 ; RV64IMB-NEXT: add a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, 3000 %tmp1 = add i64 %tmp0, -8990 @@ -739,6 +801,7 @@ ; RV32IMB-NEXT: lui a1, 2 ; RV32IMB-NEXT: addi a1, a1, 798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mulneg3000_add8990_a: @@ -749,6 +812,7 @@ ; RV64IMB-NEXT: lui a1, 2 ; RV64IMB-NEXT: addiw a1, a1, 798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, -3000 %tmp1 = add i32 %tmp0, 8990 @@ -764,6 +828,7 @@ ; RV32IMB-NEXT: lui a1, 2 ; RV32IMB-NEXT: addi a1, a1, 798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mulneg3000_add8990_b: @@ -774,6 +839,7 @@ ; RV64IMB-NEXT: lui a1, 2 ; RV64IMB-NEXT: addiw a1, a1, 798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, -3000 %tmp1 = add i32 %tmp0, 8990 @@ -795,6 +861,7 @@ ; RV32IMB-NEXT: add a0, a2, a0 ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mulneg3000_add8990_c: @@ -805,6 +872,7 @@ ; RV64IMB-NEXT: lui a1, 2 ; RV64IMB-NEXT: addiw a1, a1, 798 ; RV64IMB-NEXT: add a0, 
a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, -3000 %tmp1 = add i64 %tmp0, 8990 @@ -820,6 +888,7 @@ ; RV32IMB-NEXT: lui a1, 1048574 ; RV32IMB-NEXT: addi a1, a1, -798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mulneg3000_sub8990_a: @@ -830,6 +899,7 @@ ; RV64IMB-NEXT: lui a1, 1048574 ; RV64IMB-NEXT: addiw a1, a1, -798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, -3000 %tmp1 = add i32 %tmp0, -8990 @@ -845,6 +915,7 @@ ; RV32IMB-NEXT: lui a1, 1048574 ; RV32IMB-NEXT: addi a1, a1, -798 ; RV32IMB-NEXT: add a0, a0, a1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mulneg3000_sub8990_b: @@ -855,6 +926,7 @@ ; RV64IMB-NEXT: lui a1, 1048574 ; RV64IMB-NEXT: addiw a1, a1, -798 ; RV64IMB-NEXT: addw a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i32 %x, -3000 %tmp1 = add i32 %tmp0, -8990 @@ -877,6 +949,7 @@ ; RV32IMB-NEXT: sltu a2, a0, a2 ; RV32IMB-NEXT: add a1, a1, a2 ; RV32IMB-NEXT: addi a1, a1, -1 +; RV32IMB-NEXT: .cfi_def_cfa_offset 0 ; RV32IMB-NEXT: ret ; ; RV64IMB-LABEL: mulneg3000_sub8990_c: @@ -887,6 +960,7 @@ ; RV64IMB-NEXT: lui a1, 1048574 ; RV64IMB-NEXT: addiw a1, a1, -798 ; RV64IMB-NEXT: add a0, a0, a1 +; RV64IMB-NEXT: .cfi_def_cfa_offset 0 ; RV64IMB-NEXT: ret %tmp0 = mul i64 %x, -3000 %tmp1 = add i64 %tmp0, -8990