Index: compiler-rt/lib/builtins/CMakeLists.txt
===================================================================
--- compiler-rt/lib/builtins/CMakeLists.txt
+++ compiler-rt/lib/builtins/CMakeLists.txt
@@ -590,7 +590,12 @@
 endif()
 
 set(powerpc64le_SOURCES ${powerpc64_SOURCES})
-set(riscv_SOURCES ${GENERIC_SOURCES} ${GENERIC_TF_SOURCES})
+set(riscv_SOURCES
+  riscv/save.S
+  riscv/restore.S
+  ${GENERIC_SOURCES}
+  ${GENERIC_TF_SOURCES}
+)
 set(riscv32_SOURCES
   riscv/mulsi3.S
   ${riscv_SOURCES}
Index: compiler-rt/lib/builtins/riscv/restore.S
===================================================================
--- /dev/null
+++ compiler-rt/lib/builtins/riscv/restore.S
@@ -0,0 +1,83 @@
+//===-- restore.S - restore up to 12 callee-saved registers --------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Multiple entry points depending on the number of registers to restore
+//
+//===----------------------------------------------------------------------===//
+
+#include "save_restore.h"
+
+  // All of the entry points are in the same section since we rely on many of
+  // them falling through into each other and don't want the linker to
+  // accidentally split them up, garbage-collect them, or reorder them.
+  .text
+  .align 2
+
+  .globl __riscv_restore_12
+  .type  __riscv_restore_12,@function
+__riscv_restore_12:
+  LOAD  s11, (3*STRIDE)(sp)
+  addi  sp, sp, 4*STRIDE
+  // fallthrough into __riscv_restore_11/10/9/8
+
+  .globl __riscv_restore_11
+  .type  __riscv_restore_11,@function
+  .globl __riscv_restore_10
+  .type  __riscv_restore_10,@function
+  .globl __riscv_restore_9
+  .type  __riscv_restore_9,@function
+  .globl __riscv_restore_8
+  .type  __riscv_restore_8,@function
+__riscv_restore_11:
+__riscv_restore_10:
+__riscv_restore_9:
+__riscv_restore_8:
+  LOAD  s10, (0*STRIDE)(sp)
+  LOAD  s9,  (1*STRIDE)(sp)
+  LOAD  s8,  (2*STRIDE)(sp)
+  LOAD  s7,  (3*STRIDE)(sp)
+  addi  sp, sp, 4*STRIDE
+  // fallthrough into __riscv_restore_7/6/5/4
+
+  .globl __riscv_restore_7
+  .type  __riscv_restore_7,@function
+  .globl __riscv_restore_6
+  .type  __riscv_restore_6,@function
+  .globl __riscv_restore_5
+  .type  __riscv_restore_5,@function
+  .globl __riscv_restore_4
+  .type  __riscv_restore_4,@function
+__riscv_restore_7:
+__riscv_restore_6:
+__riscv_restore_5:
+__riscv_restore_4:
+  LOAD  s6, (0*STRIDE)(sp)
+  LOAD  s5, (1*STRIDE)(sp)
+  LOAD  s4, (2*STRIDE)(sp)
+  LOAD  s3, (3*STRIDE)(sp)
+  addi  sp, sp, 4*STRIDE
+  // fallthrough into __riscv_restore_3/2/1/0
+
+  .globl __riscv_restore_3
+  .type  __riscv_restore_3,@function
+  .globl __riscv_restore_2
+  .type  __riscv_restore_2,@function
+  .globl __riscv_restore_1
+  .type  __riscv_restore_1,@function
+  .globl __riscv_restore_0
+  .type  __riscv_restore_0,@function
+__riscv_restore_3:
+__riscv_restore_2:
+__riscv_restore_1:
+__riscv_restore_0:
+  LOAD  s2, (0*STRIDE)(sp)
+  LOAD  s1, (1*STRIDE)(sp)
+  LOAD  s0, (2*STRIDE)(sp)
+  LOAD  ra, (3*STRIDE)(sp)
+  addi  sp, sp, 4*STRIDE
+  ret
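Note on how these entry points are reached (reviewer context, not part of the
patch): a compiler emitting save/restore libcalls (e.g. GCC's -msave-restore)
replaces the usual inline spill/reload sequences in the prologue and epilogue
with a call pair. A minimal caller-side sketch, using a hypothetical function
foo that keeps values in s0 and s1 live across calls:

  foo:
    call  t0, __riscv_save_2    // saves ra and s0-s2; links via t0, not ra,
                                // so that ra itself can be spilled
    ...                         // body is free to clobber s0/s1
    tail  __riscv_restore_2     // reloads ra and s0-s2, then rets straight
                                // to foo's caller via the restored ra

The save routines are therefore entered with the return address in t0 (hence
their final "jr t0"), while the restore routines are tail-called and never
return to the function that invoked them.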
Index: compiler-rt/lib/builtins/riscv/save.S
===================================================================
--- /dev/null
+++ compiler-rt/lib/builtins/riscv/save.S
@@ -0,0 +1,95 @@
+//===-- save.S - save up to 12 callee-saved registers --------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Multiple entry points depending on the number of registers to save
+//
+//===----------------------------------------------------------------------===//
+
+#include "save_restore.h"
+
+  // The entry points share their tails: the first three groups allocate a
+  // full 16*STRIDE frame up front, record in t1 how much of it is unused
+  // (zero or a negative byte count), and the shared code after
+  // .Lriscv_save_7_4 releases the slack with "sub sp, sp, t1". The caller's
+  // return address arrives in t0 rather than ra so that ra itself can be saved.
+  .text
+  .align 2
+
+  .globl __riscv_save_12
+  .type  __riscv_save_12,@function
+__riscv_save_12:
+  addi  sp, sp, -16*STRIDE
+  mv    t1, zero               // whole frame is used; nothing to release
+  STORE s11, (3*STRIDE)(sp)
+  j     .Lriscv_save_11_8
+
+  .globl __riscv_save_11
+  .type  __riscv_save_11,@function
+  .globl __riscv_save_10
+  .type  __riscv_save_10,@function
+  .globl __riscv_save_9
+  .type  __riscv_save_9,@function
+  .globl __riscv_save_8
+  .type  __riscv_save_8,@function
+__riscv_save_11:
+__riscv_save_10:
+__riscv_save_9:
+__riscv_save_8:
+  addi  sp, sp, -16*STRIDE
+  li    t1, -4*STRIDE          // only 12*STRIDE of the frame is needed
+.Lriscv_save_11_8:
+  STORE s10, (4*STRIDE)(sp)
+  STORE s9,  (5*STRIDE)(sp)
+  STORE s8,  (6*STRIDE)(sp)
+  STORE s7,  (7*STRIDE)(sp)
+  j     .Lriscv_save_7_4
+
+  .globl __riscv_save_7
+  .type  __riscv_save_7,@function
+  .globl __riscv_save_6
+  .type  __riscv_save_6,@function
+  .globl __riscv_save_5
+  .type  __riscv_save_5,@function
+  .globl __riscv_save_4
+  .type  __riscv_save_4,@function
+__riscv_save_7:
+__riscv_save_6:
+__riscv_save_5:
+__riscv_save_4:
+  addi  sp, sp, -16*STRIDE
+  li    t1, -8*STRIDE          // only 8*STRIDE of the frame is needed
+.Lriscv_save_7_4:
+  STORE s6, (8*STRIDE)(sp)
+  STORE s5, (9*STRIDE)(sp)
+  STORE s4, (10*STRIDE)(sp)
+  STORE s3, (11*STRIDE)(sp)
+  STORE s2, (12*STRIDE)(sp)
+  STORE s1, (13*STRIDE)(sp)
+  STORE s0, (14*STRIDE)(sp)
+  STORE ra, (15*STRIDE)(sp)
+  sub   sp, sp, t1             // release the unused part of the frame
+  jr    t0
+
+  .globl __riscv_save_3
+  .type  __riscv_save_3,@function
+  .globl __riscv_save_2
+  .type  __riscv_save_2,@function
+  .globl __riscv_save_1
+  .type  __riscv_save_1,@function
+  .globl __riscv_save_0
+  .type  __riscv_save_0,@function
+__riscv_save_3:
+__riscv_save_2:
+__riscv_save_1:
+__riscv_save_0:
+  addi  sp, sp, -4*STRIDE      // smallest frame: ra plus s0-s2
+  STORE s2, (0*STRIDE)(sp)
+  STORE s1, (1*STRIDE)(sp)
+  STORE s0, (2*STRIDE)(sp)
+  STORE ra, (3*STRIDE)(sp)
+  jr    t0
Index: compiler-rt/lib/builtins/riscv/save_restore.h
===================================================================
--- /dev/null
+++ compiler-rt/lib/builtins/riscv/save_restore.h
@@ -0,0 +1,19 @@
+// Helper macros:
+//  LOAD    The appropriate mnemonic for the load instruction, which
+//          depends on XLEN.
+//  STORE   The appropriate mnemonic for the store instruction, which
+//          depends on XLEN.
+//  STRIDE  The width of the loads/stores in bytes, equivalent to the stride
+//          between each saved/restored register.
+#if __riscv_xlen == 32
+  #define LOAD lw
+  #define STORE sw
+  #define STRIDE 4
+#elif __riscv_xlen == 64
+  #define LOAD ld
+  #define STORE sd
+  #define STRIDE 8
+#else
+  #error "xlen must be 32 or 64 for save-restore implementation"
+#endif
+
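To make the macro indirection concrete, here is what the shared
__riscv_restore_3/2/1/0 tail expands to under each XLEN (a hand-expanded
sketch, not part of the patch; the C preprocessor substitutes LOAD and STRIDE
before the file reaches the assembler):

  // rv32 (__riscv_xlen == 32)      // rv64 (__riscv_xlen == 64)
  lw    s2, 0(sp)                   ld    s2, 0(sp)
  lw    s1, 4(sp)                   ld    s1, 8(sp)
  lw    s0, 8(sp)                   ld    s0, 16(sp)
  lw    ra, 12(sp)                  ld    ra, 24(sp)
  addi  sp, sp, 16                  addi  sp, sp, 32
  ret                               ret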