Index: compiler-rt/lib/builtins/CMakeLists.txt
===================================================================
--- compiler-rt/lib/builtins/CMakeLists.txt
+++ compiler-rt/lib/builtins/CMakeLists.txt
@@ -620,7 +620,12 @@ endif()
 set(powerpc64le_SOURCES ${powerpc64_SOURCES})
 
-set(riscv_SOURCES ${GENERIC_SOURCES} ${GENERIC_TF_SOURCES})
+set(riscv_SOURCES
+  riscv/save.S
+  riscv/restore.S
+  ${GENERIC_SOURCES}
+  ${GENERIC_TF_SOURCES}
+)
 
 set(riscv32_SOURCES
   riscv/mulsi3.S
   ${riscv_SOURCES}
Index: compiler-rt/lib/builtins/riscv/restore.S
===================================================================
--- /dev/null
+++ compiler-rt/lib/builtins/riscv/restore.S
@@ -0,0 +1,166 @@
+//===-- restore.S - restore up to 12 callee-saved registers --------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Multiple entry points, depending on the number of registers to be restored
+//
+//===----------------------------------------------------------------------===//
+
+// All of the entry points are in the same section since we rely on many of
+// them falling through into each other and don't want the linker to
+// accidentally split them up, garbage collect, or reorder them.
+//
+// The entry points are grouped up into 2s for rv64 and 4s for rv32 since this
+// is the minimum grouping which will maintain the required 16-byte stack
+// alignment.
+
+  .text
+
+#if __riscv_xlen == 32
+
+  .globl  __riscv_restore_12
+  .type   __riscv_restore_12,@function
+__riscv_restore_12:
+  lw      s11, 12(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_11/10/9/8
+
+  .globl  __riscv_restore_11
+  .type   __riscv_restore_11,@function
+  .globl  __riscv_restore_10
+  .type   __riscv_restore_10,@function
+  .globl  __riscv_restore_9
+  .type   __riscv_restore_9,@function
+  .globl  __riscv_restore_8
+  .type   __riscv_restore_8,@function
+__riscv_restore_11:
+__riscv_restore_10:
+__riscv_restore_9:
+__riscv_restore_8:
+  lw      s10, 0(sp)
+  lw      s9, 4(sp)
+  lw      s8, 8(sp)
+  lw      s7, 12(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_7/6/5/4
+
+  .globl  __riscv_restore_7
+  .type   __riscv_restore_7,@function
+  .globl  __riscv_restore_6
+  .type   __riscv_restore_6,@function
+  .globl  __riscv_restore_5
+  .type   __riscv_restore_5,@function
+  .globl  __riscv_restore_4
+  .type   __riscv_restore_4,@function
+__riscv_restore_7:
+__riscv_restore_6:
+__riscv_restore_5:
+__riscv_restore_4:
+  lw      s6, 0(sp)
+  lw      s5, 4(sp)
+  lw      s4, 8(sp)
+  lw      s3, 12(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_3/2/1/0
+
+  .globl  __riscv_restore_3
+  .type   __riscv_restore_3,@function
+  .globl  __riscv_restore_2
+  .type   __riscv_restore_2,@function
+  .globl  __riscv_restore_1
+  .type   __riscv_restore_1,@function
+  .globl  __riscv_restore_0
+  .type   __riscv_restore_0,@function
+__riscv_restore_3:
+__riscv_restore_2:
+__riscv_restore_1:
+__riscv_restore_0:
+  lw      s2, 0(sp)
+  lw      s1, 4(sp)
+  lw      s0, 8(sp)
+  lw      ra, 12(sp)
+  addi    sp, sp, 16
+  ret
+
+#elif __riscv_xlen == 64
+
+  .globl  __riscv_restore_12
+  .type   __riscv_restore_12,@function
+__riscv_restore_12:
+  ld      s11, 8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_11/10
+
+  .globl  __riscv_restore_11
+  .type   __riscv_restore_11,@function
+  .globl  __riscv_restore_10
+  .type   __riscv_restore_10,@function
+__riscv_restore_11:
+__riscv_restore_10:
+  ld      s10, 0(sp)
+  ld      s9, 8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_9/8
+
+  .globl  __riscv_restore_9
+  .type   __riscv_restore_9,@function
+  .globl  __riscv_restore_8
+  .type   __riscv_restore_8,@function
+__riscv_restore_9:
+__riscv_restore_8:
+  ld      s8, 0(sp)
+  ld      s7, 8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_7/6
+
+  .globl  __riscv_restore_7
+  .type   __riscv_restore_7,@function
+  .globl  __riscv_restore_6
+  .type   __riscv_restore_6,@function
+__riscv_restore_7:
+__riscv_restore_6:
+  ld      s6, 0(sp)
+  ld      s5, 8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_5/4
+
+  .globl  __riscv_restore_5
+  .type   __riscv_restore_5,@function
+  .globl  __riscv_restore_4
+  .type   __riscv_restore_4,@function
+__riscv_restore_5:
+__riscv_restore_4:
+  ld      s4, 0(sp)
+  ld      s3, 8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_3/2
+
+  .globl  __riscv_restore_3
+  .type   __riscv_restore_3,@function
+  .globl  __riscv_restore_2
+  .type   __riscv_restore_2,@function
+__riscv_restore_3:
+__riscv_restore_2:
+  ld      s2, 0(sp)
+  ld      s1, 8(sp)
+  addi    sp, sp, 16
+  // fallthrough into __riscv_restore_1/0
+
+  .globl  __riscv_restore_1
+  .type   __riscv_restore_1,@function
+  .globl  __riscv_restore_0
+  .type   __riscv_restore_0,@function
+__riscv_restore_1:
+__riscv_restore_0:
+  ld      s0, 0(sp)
+  ld      ra, 8(sp)
+  addi    sp, sp, 16
+  ret
+
+#else
+# error "xlen must be 32 or 64 for save-restore implementation"
+#endif
Index: compiler-rt/lib/builtins/riscv/save.S
===================================================================
--- /dev/null
+++ compiler-rt/lib/builtins/riscv/save.S
@@ -0,0 +1,184 @@
+//===-- save.S - save up to 12 callee-saved registers ---------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Multiple entry points, depending on the number of registers to be saved
+//
+//===----------------------------------------------------------------------===//
+
+// The entry points are grouped up into 2s for rv64 and 4s for rv32 since this
+// is the minimum grouping which will maintain the required 16-byte stack
+// alignment.
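+//
+// Each group of entry points allocates the full frame needed by the final
+// store sequence it jumps into, and records in t1 how much of that frame it
+// did not actually need; the `add sp, sp, t1` before returning hands the
+// excess back. All of these routines return with `jr t0` rather than `ret`,
+// because ra is itself among the registers being saved and still holds the
+// caller's return address.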
+
+  .text
+
+#if __riscv_xlen == 32
+
+  .globl  __riscv_save_12
+  .type   __riscv_save_12,@function
+__riscv_save_12:
+  addi    sp, sp, -64
+  mv      t1, zero
+  sw      s11, 12(sp)
+  j       .Lriscv_save_11_8
+
+  .globl  __riscv_save_11
+  .type   __riscv_save_11,@function
+  .globl  __riscv_save_10
+  .type   __riscv_save_10,@function
+  .globl  __riscv_save_9
+  .type   __riscv_save_9,@function
+  .globl  __riscv_save_8
+  .type   __riscv_save_8,@function
+__riscv_save_11:
+__riscv_save_10:
+__riscv_save_9:
+__riscv_save_8:
+  addi    sp, sp, -64
+  li      t1, 16
+.Lriscv_save_11_8:
+  sw      s10, 16(sp)
+  sw      s9, 20(sp)
+  sw      s8, 24(sp)
+  sw      s7, 28(sp)
+  j       .Lriscv_save_7_4
+
+  .globl  __riscv_save_7
+  .type   __riscv_save_7,@function
+  .globl  __riscv_save_6
+  .type   __riscv_save_6,@function
+  .globl  __riscv_save_5
+  .type   __riscv_save_5,@function
+  .globl  __riscv_save_4
+  .type   __riscv_save_4,@function
+__riscv_save_7:
+__riscv_save_6:
+__riscv_save_5:
+__riscv_save_4:
+  addi    sp, sp, -64
+  li      t1, 32
+.Lriscv_save_7_4:
+  sw      s6, 32(sp)
+  sw      s5, 36(sp)
+  sw      s4, 40(sp)
+  sw      s3, 44(sp)
+  sw      s2, 48(sp)
+  sw      s1, 52(sp)
+  sw      s0, 56(sp)
+  sw      ra, 60(sp)
+  add     sp, sp, t1
+  jr      t0
+
+  .globl  __riscv_save_3
+  .type   __riscv_save_3,@function
+  .globl  __riscv_save_2
+  .type   __riscv_save_2,@function
+  .globl  __riscv_save_1
+  .type   __riscv_save_1,@function
+  .globl  __riscv_save_0
+  .type   __riscv_save_0,@function
+__riscv_save_3:
+__riscv_save_2:
+__riscv_save_1:
+__riscv_save_0:
+  addi    sp, sp, -16
+  sw      s2, 0(sp)
+  sw      s1, 4(sp)
+  sw      s0, 8(sp)
+  sw      ra, 12(sp)
+  jr      t0
+
+#elif __riscv_xlen == 64
+
+  .globl  __riscv_save_12
+  .type   __riscv_save_12,@function
+__riscv_save_12:
+  addi    sp, sp, -112
+  mv      t1, zero
+  sd      s11, 8(sp)
+  j       .Lriscv_save_11_10
+
+  .globl  __riscv_save_11
+  .type   __riscv_save_11,@function
+  .globl  __riscv_save_10
+  .type   __riscv_save_10,@function
+__riscv_save_11:
+__riscv_save_10:
+  addi    sp, sp, -112
+  li      t1, 16
+.Lriscv_save_11_10:
+  sd      s10, 16(sp)
+  sd      s9, 24(sp)
+  j       .Lriscv_save_9_8
+
+  .globl  __riscv_save_9
+  .type   __riscv_save_9,@function
+  .globl  __riscv_save_8
+  .type   __riscv_save_8,@function
+__riscv_save_9:
+__riscv_save_8:
+  addi    sp, sp, -112
+  li      t1, 32
+.Lriscv_save_9_8:
+  sd      s8, 32(sp)
+  sd      s7, 40(sp)
+  j       .Lriscv_save_7_6
+
+  .globl  __riscv_save_7
+  .type   __riscv_save_7,@function
+  .globl  __riscv_save_6
+  .type   __riscv_save_6,@function
+__riscv_save_7:
+__riscv_save_6:
+  addi    sp, sp, -112
+  li      t1, 48
+.Lriscv_save_7_6:
+  sd      s6, 48(sp)
+  sd      s5, 56(sp)
+  j       .Lriscv_save_5_4
+
+  .globl  __riscv_save_5
+  .type   __riscv_save_5,@function
+  .globl  __riscv_save_4
+  .type   __riscv_save_4,@function
+__riscv_save_5:
+__riscv_save_4:
+  addi    sp, sp, -112
+  li      t1, 64
+.Lriscv_save_5_4:
+  sd      s4, 64(sp)
+  sd      s3, 72(sp)
+  j       .Lriscv_save_3_2
+
+  .globl  __riscv_save_3
+  .type   __riscv_save_3,@function
+  .globl  __riscv_save_2
+  .type   __riscv_save_2,@function
+__riscv_save_3:
+__riscv_save_2:
+  addi    sp, sp, -112
+  li      t1, 80
+.Lriscv_save_3_2:
+  sd      s2, 80(sp)
+  sd      s1, 88(sp)
+  sd      s0, 96(sp)
+  sd      ra, 104(sp)
+  add     sp, sp, t1
+  jr      t0
+
+  .globl  __riscv_save_1
+  .type   __riscv_save_1,@function
+  .globl  __riscv_save_0
+  .type   __riscv_save_0,@function
+__riscv_save_1:
+__riscv_save_0:
+  addi    sp, sp, -16
+  sd      s0, 0(sp)
+  sd      ra, 8(sp)
+  jr      t0
+
+#else
+# error "xlen must be 32 or 64 for save-restore implementation"
+#endif
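
Usage note (commentary, not part of the patch): these routines are the
runtime half of the RISC-V save/restore libcall optimization, in which the
compiler shrinks function prologues and epilogues by calling __riscv_save_N
and tail-jumping to __riscv_restore_N instead of emitting the spills inline.
The sketch below is illustrative only: `foo` is a made-up caller, and the
exact `call t0, ...`/`tail ...` spelling is an assumption based on the
convention visible above (the save routines hand control back through
`jr t0`, so they must be reached by a jump-and-link that writes t0, leaving
ra intact to be saved). For an rv32 function that clobbers ra, s0, and s1:

    foo:
      call  t0, __riscv_save_2     // jal via t0: builds one 16-byte frame
                                   // holding ra/s0/s1 (and s2)
      ...                          // body, free to clobber ra, s0, s1
      tail  __riscv_restore_2      // reloads the registers, pops the frame,
                                   // then `ret`s on the caller's behalf

Because __riscv_restore_N reloads ra and ends in `ret`, the caller's epilogue
collapses to a single tail jump; no separate return instruction is needed.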