Index: compiler-rt/lib/builtins/riscv/restore.S
===================================================================
--- compiler-rt/lib/builtins/riscv/restore.S
+++ compiler-rt/lib/builtins/riscv/restore.S
@@ -25,7 +25,23 @@
   .globl __riscv_restore_12
   .type  __riscv_restore_12,@function
 __riscv_restore_12:
+  .cfi_startproc
+  .cfi_def_cfa_offset 64
+  .cfi_offset s11, -64+12
+  .cfi_offset s10, -64+16
+  .cfi_offset s9, -64+20
+  .cfi_offset s8, -64+24
+  .cfi_offset s7, -64+28
+  .cfi_offset s6, -64+32
+  .cfi_offset s5, -64+36
+  .cfi_offset s4, -64+40
+  .cfi_offset s3, -64+44
+  .cfi_offset s2, -64+48
+  .cfi_offset s1, -64+52
+  .cfi_offset s0, -64+56
+  .cfi_offset ra, -64+60
   lw s11, 12(sp)
+  .cfi_restore s11
   addi sp, sp, 16
   // fallthrough into __riscv_restore_11/10/9/8

@@ -41,10 +57,16 @@
 __riscv_restore_10:
 __riscv_restore_9:
 __riscv_restore_8:
+  .cfi_restore s11
+  .cfi_def_cfa_offset 48
   lw s10, 0(sp)
+  .cfi_restore s10
   lw s9, 4(sp)
+  .cfi_restore s9
   lw s8, 8(sp)
+  .cfi_restore s8
   lw s7, 12(sp)
+  .cfi_restore s7
   addi sp, sp, 16
   // fallthrough into __riscv_restore_7/6/5/4

@@ -60,10 +82,20 @@
 __riscv_restore_6:
 __riscv_restore_5:
 __riscv_restore_4:
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
+  .cfi_def_cfa_offset 32
   lw s6, 0(sp)
+  .cfi_restore s6
   lw s5, 4(sp)
+  .cfi_restore s5
   lw s4, 8(sp)
+  .cfi_restore s4
   lw s3, 12(sp)
+  .cfi_restore s3
   addi sp, sp, 16
   // fallthrough into __riscv_restore_3/2/1/0

@@ -79,19 +111,51 @@
 __riscv_restore_2:
 __riscv_restore_1:
 __riscv_restore_0:
+  .cfi_restore s3
+  .cfi_restore s4
+  .cfi_restore s5
+  .cfi_restore s6
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
+  .cfi_def_cfa_offset 16
   lw s2, 0(sp)
+  .cfi_restore s2
   lw s1, 4(sp)
+  .cfi_restore s1
   lw s0, 8(sp)
+  .cfi_restore s0
   lw ra, 12(sp)
+  .cfi_restore ra
   addi sp, sp, 16
+  .cfi_def_cfa_offset 0
   ret
+  .cfi_endproc

 #elif __riscv_xlen == 64

   .globl __riscv_restore_12
   .type  __riscv_restore_12,@function
 __riscv_restore_12:
+  .cfi_startproc
+  .cfi_def_cfa_offset 112
+  .cfi_offset s11, -112+8
+  .cfi_offset s10, -112+16
+  .cfi_offset s9, -112+24
+  .cfi_offset s8, -112+32
+  .cfi_offset s7, -112+40
+  .cfi_offset s6, -112+48
+  .cfi_offset s5, -112+56
+  .cfi_offset s4, -112+64
+  .cfi_offset s3, -112+72
+  .cfi_offset s2, -112+80
+  .cfi_offset s1, -112+88
+  .cfi_offset s0, -112+96
+  .cfi_offset ra, -112+104
   ld s11, 8(sp)
+  .cfi_restore s11
   addi sp, sp, 16
   // fallthrough into __riscv_restore_11/10

@@ -101,8 +165,12 @@
   .type  __riscv_restore_10,@function
 __riscv_restore_11:
 __riscv_restore_10:
+  .cfi_restore s11
+  .cfi_def_cfa_offset 96
   ld s10, 0(sp)
+  .cfi_restore s10
   ld s9, 8(sp)
+  .cfi_restore s9
   addi sp, sp, 16
   // fallthrough into __riscv_restore_9/8

@@ -112,8 +180,14 @@
   .type  __riscv_restore_8,@function
 __riscv_restore_9:
 __riscv_restore_8:
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
+  .cfi_def_cfa_offset 80
   ld s8, 0(sp)
+  .cfi_restore s8
   ld s7, 8(sp)
+  .cfi_restore s7
   addi sp, sp, 16
   // fallthrough into __riscv_restore_7/6

@@ -123,8 +197,16 @@
   .type  __riscv_restore_6,@function
 __riscv_restore_7:
 __riscv_restore_6:
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
+  .cfi_def_cfa_offset 64
   ld s6, 0(sp)
+  .cfi_restore s6
   ld s5, 8(sp)
+  .cfi_restore s5
   addi sp, sp, 16
   // fallthrough into __riscv_restore_5/4

@@ -134,8 +216,18 @@
   .type  __riscv_restore_4,@function
 __riscv_restore_5:
 __riscv_restore_4:
+  .cfi_restore s5
+  .cfi_restore s6
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
+  .cfi_def_cfa_offset 48
   ld s4, 0(sp)
+  .cfi_restore s4
   ld s3, 8(sp)
+  .cfi_restore s3
   addi sp, sp, 16
   // fallthrough into __riscv_restore_3/2

@@ -145,8 +237,20 @@
   .type  __riscv_restore_2,@function
 __riscv_restore_3:
 __riscv_restore_2:
+  .cfi_restore s3
+  .cfi_restore s4
+  .cfi_restore s5
+  .cfi_restore s6
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
+  .cfi_def_cfa_offset 32
   ld s2, 0(sp)
+  .cfi_restore s2
   ld s1, 8(sp)
+  .cfi_restore s1
   addi sp, sp, 16
   // fallthrough into __riscv_restore_1/0

@@ -156,10 +260,26 @@
   .type  __riscv_restore_0,@function
 __riscv_restore_1:
 __riscv_restore_0:
+  .cfi_restore s1
+  .cfi_restore s2
+  .cfi_restore s3
+  .cfi_restore s4
+  .cfi_restore s5
+  .cfi_restore s6
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
+  .cfi_def_cfa_offset 16
   ld s0, 0(sp)
+  .cfi_restore s0
   ld ra, 8(sp)
+  .cfi_restore ra
   addi sp, sp, 16
+  .cfi_def_cfa_offset 0
   ret
+  .cfi_endproc

 #else
 # error "xlen must be 32 or 64 for save-restore implementation"
Index: compiler-rt/lib/builtins/riscv/save.S
===================================================================
--- compiler-rt/lib/builtins/riscv/save.S
+++ compiler-rt/lib/builtins/riscv/save.S
@@ -21,9 +21,13 @@
   .globl __riscv_save_12
   .type  __riscv_save_12,@function
 __riscv_save_12:
+  .cfi_startproc
+  .cfi_return_column t0 # return through t0 not ra
   addi sp, sp, -64
+  .cfi_def_cfa_offset 64
   mv t1, zero
   sw s11, 12(sp)
+  .cfi_offset s11, -64+12
   j .Lriscv_save_11_8

   .globl __riscv_save_11
@@ -38,13 +42,19 @@
 __riscv_save_10:
 __riscv_save_9:
 __riscv_save_8:
+  .cfi_restore s11
   addi sp, sp, -64
+  .cfi_def_cfa_offset 64
   li t1, 16
 .Lriscv_save_11_8:
   sw s10, 16(sp)
+  .cfi_offset s10, -64+16
   sw s9, 20(sp)
+  .cfi_offset s9, -64+20
   sw s8, 24(sp)
+  .cfi_offset s8, -64+24
   sw s7, 28(sp)
+  .cfi_offset s7, -64+28
   j .Lriscv_save_7_4

   .globl __riscv_save_7
@@ -59,19 +69,34 @@
 __riscv_save_6:
 __riscv_save_5:
 __riscv_save_4:
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
   addi sp, sp, -64
+  .cfi_def_cfa_offset 64
   li t1, 32
 .Lriscv_save_7_4:
   sw s6, 32(sp)
+  .cfi_offset s6, -64+32
   sw s5, 36(sp)
+  .cfi_offset s5, -64+36
   sw s4, 40(sp)
+  .cfi_offset s4, -64+40
   sw s3, 44(sp)
+  .cfi_offset s3, -64+44
   sw s2, 48(sp)
+  .cfi_offset s2, -64+48
   sw s1, 52(sp)
+  .cfi_offset s1, -64+52
   sw s0, 56(sp)
+  .cfi_offset s0, -64+56
   sw ra, 60(sp)
+  .cfi_offset ra, -64+60
   add sp, sp, t1
   jr t0
+  .cfi_endproc

   .globl __riscv_save_3
   .type  __riscv_save_3,@function
@@ -85,21 +110,33 @@
 __riscv_save_2:
 __riscv_save_1:
 __riscv_save_0:
+  .cfi_startproc
+  .cfi_return_column t0 # return through t0 not ra
   addi sp, sp, -16
+  .cfi_def_cfa_offset 16
   sw s2, 0(sp)
+  .cfi_offset s2, -16+0
   sw s1, 4(sp)
+  .cfi_offset s1, -16+4
   sw s0, 8(sp)
+  .cfi_offset s0, -16+8
   sw ra, 12(sp)
+  .cfi_offset ra, -16+12
   jr t0
+  .cfi_endproc

 #elif __riscv_xlen == 64

   .globl __riscv_save_12
   .type  __riscv_save_12,@function
 __riscv_save_12:
+  .cfi_startproc
+  .cfi_return_column t0 # return through t0 not ra
   addi sp, sp, -112
+  .cfi_def_cfa_offset 112
   mv t1, zero
   sd s11, 8(sp)
+  .cfi_offset s11, -112+8
   j .Lriscv_save_11_10

   .globl __riscv_save_11
@@ -108,11 +145,15 @@
   .type  __riscv_save_10,@function
 __riscv_save_11:
 __riscv_save_10:
+  .cfi_restore s11
   addi sp, sp, -112
+  .cfi_def_cfa_offset 112
   li t1, 16
 .Lriscv_save_11_10:
   sd s10, 16(sp)
+  .cfi_offset s10, -112+16
   sd s9, 24(sp)
+  .cfi_offset s9, -112+24
   j .Lriscv_save_9_8

   .globl __riscv_save_9
@@ -121,11 +162,17 @@
   .type  __riscv_save_8,@function
 __riscv_save_9:
 __riscv_save_8:
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
   addi sp, sp, -112
+  .cfi_def_cfa_offset 112
   li t1, 32
 .Lriscv_save_9_8:
   sd s8, 32(sp)
+  .cfi_offset s8, -112+32
   sd s7, 40(sp)
+  .cfi_offset s7, -112+40
   j .Lriscv_save_7_6

   .globl __riscv_save_7
@@ -134,11 +181,19 @@
   .type  __riscv_save_6,@function
 __riscv_save_7:
 __riscv_save_6:
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
   addi sp, sp, -112
+  .cfi_def_cfa_offset 112
   li t1, 48
 .Lriscv_save_7_6:
   sd s6, 48(sp)
+  .cfi_offset s6, -112+48
   sd s5, 56(sp)
+  .cfi_offset s5, -112+56
   j .Lriscv_save_5_4

   .globl __riscv_save_5
@@ -147,11 +202,21 @@
   .type  __riscv_save_4,@function
 __riscv_save_5:
 __riscv_save_4:
+  .cfi_restore s5
+  .cfi_restore s6
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
   addi sp, sp, -112
+  .cfi_def_cfa_offset 112
   li t1, 64
 .Lriscv_save_5_4:
   sd s4, 64(sp)
+  .cfi_offset s4, -112+64
   sd s3, 72(sp)
+  .cfi_offset s3, -112+72
   j .Lriscv_save_3_2

   .globl __riscv_save_3
@@ -160,15 +225,30 @@
   .type  __riscv_save_2,@function
 __riscv_save_3:
 __riscv_save_2:
+  .cfi_restore s3
+  .cfi_restore s4
+  .cfi_restore s5
+  .cfi_restore s6
+  .cfi_restore s7
+  .cfi_restore s8
+  .cfi_restore s9
+  .cfi_restore s10
+  .cfi_restore s11
   addi sp, sp, -112
+  .cfi_def_cfa_offset 112
   li t1, 80
 .Lriscv_save_3_2:
   sd s2, 80(sp)
+  .cfi_offset s2, -112+80
   sd s1, 88(sp)
+  .cfi_offset s1, -112+88
   sd s0, 96(sp)
+  .cfi_offset s0, -112+96
   sd ra, 104(sp)
+  .cfi_offset ra, -112+104
   add sp, sp, t1
   jr t0
+  .cfi_endproc

   .globl __riscv_save_1
   .type  __riscv_save_1,@function
@@ -176,10 +256,16 @@
   .type  __riscv_save_0,@function
 __riscv_save_1:
 __riscv_save_0:
+  .cfi_startproc
+  .cfi_return_column t0 # return through t0 not ra
   addi sp, sp, -16
+  .cfi_def_cfa_offset 16
   sd s0, 0(sp)
+  .cfi_offset s0, -16+0
   sd ra, 8(sp)
+  .cfi_offset ra, -16+8
   jr t0
+  .cfi_endproc

 #else
 # error "xlen must be 32 or 64 for save-restore implementation"
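
Note on how these routines are reached, as context for the ".cfi_return_column t0" lines above: with -msave-restore, the compiler replaces inline prologue/epilogue spills with a call to __riscv_save_<N> through t0 and a tail call to __riscv_restore_<N>, so the save routines' return address lives in t0 rather than ra. A minimal RV32 sketch of a hypothetical caller foo (illustration only, not part of this patch):

    .globl foo
    .type  foo,@function
  foo:
    call t0, __riscv_save_2     # jal through t0: sp -= 16; saves s2,s1,s0,ra
                                # at 0/4/8/12(sp), per __riscv_save_3..0 above
    # ... function body; may clobber s0-s2, ra is preserved in the frame ...
    tail __riscv_restore_2      # reloads s2,s1,s0,ra; sp += 16; ret to caller

An unwinder that stops inside __riscv_save_2 must therefore fetch the return address from t0, which is exactly what .cfi_return_column t0 records. The .cfi_offset reg, -<framesize>+<slot> directives then describe each save slot relative to the CFA: for example, -64+12 places s11 at CFA-52, i.e. at 12(sp) once the 64-byte frame is allocated.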