1 | //===-- restore.S - restore up to 12 callee-save registers ----------------===// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | // |
9 | // Multiple entry points depending on number of registers to restore |
10 | // |
11 | //===----------------------------------------------------------------------===// |
12 | |
13 | // All of the entry points are in the same section since we rely on many of |
14 | // them falling through into each other and don't want the linker to |
15 | // accidentally split them up, garbage collect, or reorder them. |
16 | // |
17 | // For the conventional ABIs, entry points are grouped up into 2s for rv64 and |
18 | // 4s for rv32 since this is the minimum grouping which will maintain the |
19 | // required 16-byte stack alignment. |
20 | // |
// For the ilp32e/lp64e ABIs, entry points are grouped into 1s, since this is
// the minimum grouping which will maintain the required 4-byte stack alignment.
23 | |
24 | .text |
25 | |
26 | #if __riscv_xlen == 32 |
27 | |
28 | #ifndef __riscv_abi_rve |
29 | |
  // Restore s11 (the 12th callee-saved register) from the only used slot of
  // its 16-byte frame chunk, pop that chunk, then fall through to the next
  // group, which restores s10..s7.
  .globl __riscv_restore_12
  .type __riscv_restore_12,@function
__riscv_restore_12:
  lw s11, 12(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_11/10/9/8
36 | |
  // Entry points 11/10/9/8 share one body: all four restore s10..s7 from a
  // 16-byte chunk (groups of 4 keep rv32 stack alignment at 16 bytes), pop
  // it, and fall through to the group that restores s6..s3.
  .globl __riscv_restore_11
  .type __riscv_restore_11,@function
  .globl __riscv_restore_10
  .type __riscv_restore_10,@function
  .globl __riscv_restore_9
  .type __riscv_restore_9,@function
  .globl __riscv_restore_8
  .type __riscv_restore_8,@function
__riscv_restore_11:
__riscv_restore_10:
__riscv_restore_9:
__riscv_restore_8:
  lw s10, 0(sp)
  lw s9, 4(sp)
  lw s8, 8(sp)
  lw s7, 12(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_7/6/5/4
55 | |
  // Entry points 7/6/5/4: restore s6..s3 from a 16-byte chunk, pop it, and
  // fall through to the final group (s2..s0 and ra).
  .globl __riscv_restore_7
  .type __riscv_restore_7,@function
  .globl __riscv_restore_6
  .type __riscv_restore_6,@function
  .globl __riscv_restore_5
  .type __riscv_restore_5,@function
  .globl __riscv_restore_4
  .type __riscv_restore_4,@function
__riscv_restore_7:
__riscv_restore_6:
__riscv_restore_5:
__riscv_restore_4:
  lw s6, 0(sp)
  lw s5, 4(sp)
  lw s4, 8(sp)
  lw s3, 12(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_3/2/1/0
74 | |
  // Terminal group: entry points 3/2/1/0 restore s2..s0 and the return
  // address from the last 16-byte chunk, pop it, and return to the restored
  // ra (the original caller of the function that used save/restore).
  .globl __riscv_restore_3
  .type __riscv_restore_3,@function
  .globl __riscv_restore_2
  .type __riscv_restore_2,@function
  .globl __riscv_restore_1
  .type __riscv_restore_1,@function
  .globl __riscv_restore_0
  .type __riscv_restore_0,@function
__riscv_restore_3:
__riscv_restore_2:
__riscv_restore_1:
__riscv_restore_0:
  lw s2, 0(sp)
  lw s1, 4(sp)
  lw s0, 8(sp)
  lw ra, 12(sp)
  addi sp, sp, 16
  ret
93 | |
94 | #else |
95 | |
  // ilp32e variant: the RVE ABI only requires 4-byte stack alignment and has
  // only s0/s1 as callee-saved, so each entry point restores exactly one
  // register from a 4-byte slot. Restore s1, then fall through.
  .globl __riscv_restore_2
  .type __riscv_restore_2,@function
__riscv_restore_2:
  lw s1, 0(sp)
  addi sp, sp, 4
  // fallthrough into __riscv_restore_1/0
102 | |
  // ilp32e: restore s0 from its 4-byte slot, then fall through.
  .globl __riscv_restore_1
  .type __riscv_restore_1,@function
__riscv_restore_1:
  lw s0, 0(sp)
  addi sp, sp, 4
  // fallthrough into __riscv_restore_0
109 | |
  // ilp32e terminal entry point: restore the return address, pop its slot,
  // and return to the restored ra.
  .globl __riscv_restore_0
  .type __riscv_restore_0,@function
__riscv_restore_0:
  lw ra, 0(sp)
  addi sp, sp, 4
  ret
116 | |
117 | #endif |
118 | |
119 | #elif __riscv_xlen == 64 |
120 | |
121 | #ifndef __riscv_abi_rve |
122 | |
  // rv64: registers are 8 bytes, so entry points pair up in 16-byte chunks.
  // __riscv_restore_12 restores s11 alone from the top slot of its chunk,
  // pops the chunk, and falls through to the s10/s9 pair.
  .globl __riscv_restore_12
  .type __riscv_restore_12,@function
__riscv_restore_12:
  ld s11, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_11/10
129 | |
  // Entry points 11/10: restore the s10/s9 pair from a 16-byte chunk, pop
  // it, and fall through.
  .globl __riscv_restore_11
  .type __riscv_restore_11,@function
  .globl __riscv_restore_10
  .type __riscv_restore_10,@function
__riscv_restore_11:
__riscv_restore_10:
  ld s10, 0(sp)
  ld s9, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_9/8
140 | |
  // Entry points 9/8: restore the s8/s7 pair, pop the chunk, fall through.
  .globl __riscv_restore_9
  .type __riscv_restore_9,@function
  .globl __riscv_restore_8
  .type __riscv_restore_8,@function
__riscv_restore_9:
__riscv_restore_8:
  ld s8, 0(sp)
  ld s7, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_7/6
151 | |
  // Entry points 7/6: restore the s6/s5 pair, pop the chunk, fall through.
  .globl __riscv_restore_7
  .type __riscv_restore_7,@function
  .globl __riscv_restore_6
  .type __riscv_restore_6,@function
__riscv_restore_7:
__riscv_restore_6:
  ld s6, 0(sp)
  ld s5, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_5/4
162 | |
  // Entry points 5/4: restore the s4/s3 pair, pop the chunk, fall through.
  .globl __riscv_restore_5
  .type __riscv_restore_5,@function
  .globl __riscv_restore_4
  .type __riscv_restore_4,@function
__riscv_restore_5:
__riscv_restore_4:
  ld s4, 0(sp)
  ld s3, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_3/2
173 | |
  // Entry points 3/2: restore the s2/s1 pair, pop the chunk, fall through.
  .globl __riscv_restore_3
  .type __riscv_restore_3,@function
  .globl __riscv_restore_2
  .type __riscv_restore_2,@function
__riscv_restore_3:
__riscv_restore_2:
  ld s2, 0(sp)
  ld s1, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_1/0
184 | |
  // Terminal group: entry points 1/0 restore s0 and the return address from
  // the last 16-byte chunk, pop it, and return to the restored ra.
  .globl __riscv_restore_1
  .type __riscv_restore_1,@function
  .globl __riscv_restore_0
  .type __riscv_restore_0,@function
__riscv_restore_1:
__riscv_restore_0:
  ld s0, 0(sp)
  ld ra, 8(sp)
  addi sp, sp, 16
  ret
195 | |
196 | #else |
197 | |
  // lp64e variant: only 8-byte (register-sized) stack alignment is required
  // and only s0/s1 are callee-saved, so each entry point restores exactly
  // one register from an 8-byte slot. Restore s1, then fall through.
  .globl __riscv_restore_2
  .type __riscv_restore_2,@function
__riscv_restore_2:
  ld s1, 0(sp)
  addi sp, sp, 8
  // fallthrough into __riscv_restore_1/0
204 | |
  // lp64e: restore s0 from its 8-byte slot, then fall through.
  .globl __riscv_restore_1
  .type __riscv_restore_1,@function
__riscv_restore_1:
  ld s0, 0(sp)
  addi sp, sp, 8
  // fallthrough into __riscv_restore_0
211 | |
  // lp64e terminal entry point: restore the return address, pop its slot,
  // and return to the restored ra.
  .globl __riscv_restore_0
  .type __riscv_restore_0,@function
__riscv_restore_0:
  ld ra, 0(sp)
  addi sp, sp, 8
  ret
218 | |
219 | #endif |
220 | |
#else
// Reject targets this implementation does not cover.
// Fix: the #error message string was missing its closing double quote, which
// causes a "missing terminating '\"' character" preprocessor diagnostic.
# error "xlen must be 32 or 64 for save-restore implementation"
#endif
224 | |