//===-- restore.S - restore up to 12 callee-save registers ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Multiple entry points depending on number of registers to restore
//
//===----------------------------------------------------------------------===//

// All of the entry points are in the same section since we rely on many of
// them falling through into each other and don't want the linker to
// accidentally split them up, garbage collect, or reorder them.
//
// The entry points are grouped up into 2s for rv64 and 4s for rv32 since this
// is the minimum grouping which will maintain the required 16-byte stack
// alignment.

  .text

#if __riscv_xlen == 32

// rv32 layout: each group restores four 4-byte registers from a 16-byte
// stack slab, pops it, and falls through to the next group. The matching
// __riscv_save_N routine pushed the registers in the mirror-image order,
// so offsets within each slab (0/4/8/12) are fixed across all groups.

  .globl  __riscv_restore_12
  .type   __riscv_restore_12,@function
__riscv_restore_12:
  lw      s11, 12(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_11/10/9/8

  .globl  __riscv_restore_11
  .type   __riscv_restore_11,@function
  .globl  __riscv_restore_10
  .type   __riscv_restore_10,@function
  .globl  __riscv_restore_9
  .type   __riscv_restore_9,@function
  .globl  __riscv_restore_8
  .type   __riscv_restore_8,@function
__riscv_restore_11:
__riscv_restore_10:
__riscv_restore_9:
__riscv_restore_8:
  lw      s10, 0(sp)
  lw      s9, 4(sp)
  lw      s8, 8(sp)
  lw      s7, 12(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_7/6/5/4

  .globl  __riscv_restore_7
  .type   __riscv_restore_7,@function
  .globl  __riscv_restore_6
  .type   __riscv_restore_6,@function
  .globl  __riscv_restore_5
  .type   __riscv_restore_5,@function
  .globl  __riscv_restore_4
  .type   __riscv_restore_4,@function
__riscv_restore_7:
__riscv_restore_6:
__riscv_restore_5:
__riscv_restore_4:
  lw      s6, 0(sp)
  lw      s5, 4(sp)
  lw      s4, 8(sp)
  lw      s3, 12(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_3/2/1/0

  .globl  __riscv_restore_3
  .type   __riscv_restore_3,@function
  .globl  __riscv_restore_2
  .type   __riscv_restore_2,@function
  .globl  __riscv_restore_1
  .type   __riscv_restore_1,@function
  .globl  __riscv_restore_0
  .type   __riscv_restore_0,@function
__riscv_restore_3:
__riscv_restore_2:
__riscv_restore_1:
__riscv_restore_0:
  lw      s2, 0(sp)
  lw      s1, 4(sp)
  lw      s0, 8(sp)
  lw      ra, 12(sp)
  addi    sp, sp, 16
  ret

#elif __riscv_xlen == 64

// rv64 layout: each group restores two 8-byte registers from a 16-byte
// stack slab, pops it, and falls through to the next group. Offsets within
// each slab (0/8) are fixed across all groups; the final group restores
// s0 and ra and returns to the original caller.

  .globl  __riscv_restore_12
  .type   __riscv_restore_12,@function
__riscv_restore_12:
  ld      s11, 8(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_11/10

  .globl  __riscv_restore_11
  .type   __riscv_restore_11,@function
  .globl  __riscv_restore_10
  .type   __riscv_restore_10,@function
__riscv_restore_11:
__riscv_restore_10:
  ld      s10, 0(sp)
  ld      s9, 8(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_9/8

  .globl  __riscv_restore_9
  .type   __riscv_restore_9,@function
  .globl  __riscv_restore_8
  .type   __riscv_restore_8,@function
__riscv_restore_9:
__riscv_restore_8:
  ld      s8, 0(sp)
  ld      s7, 8(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_7/6

  .globl  __riscv_restore_7
  .type   __riscv_restore_7,@function
  .globl  __riscv_restore_6
  .type   __riscv_restore_6,@function
__riscv_restore_7:
__riscv_restore_6:
  ld      s6, 0(sp)
  ld      s5, 8(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_5/4

  .globl  __riscv_restore_5
  .type   __riscv_restore_5,@function
  .globl  __riscv_restore_4
  .type   __riscv_restore_4,@function
__riscv_restore_5:
__riscv_restore_4:
  ld      s4, 0(sp)
  ld      s3, 8(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_3/2

  .globl  __riscv_restore_3
  .type   __riscv_restore_3,@function
  .globl  __riscv_restore_2
  .type   __riscv_restore_2,@function
__riscv_restore_3:
__riscv_restore_2:
  ld      s2, 0(sp)
  ld      s1, 8(sp)
  addi    sp, sp, 16
  // fallthrough into __riscv_restore_1/0

  .globl  __riscv_restore_1
  .type   __riscv_restore_1,@function
  .globl  __riscv_restore_0
  .type   __riscv_restore_0,@function
__riscv_restore_1:
__riscv_restore_0:
  ld      s0, 0(sp)
  ld      ra, 8(sp)
  addi    sp, sp, 16
  ret

#else
// Close the string literal (the original was unterminated, which triggers
// "missing terminating '\"' character" preprocessor warnings).
# error "xlen must be 32 or 64 for save-restore implementation"
#endif