1 | /* SPDX-License-Identifier: GPL-2.0 */ |
2 | /* |
3 | * Copyright (C) 2019 Western Digital Corporation or its affiliates. |
4 | * |
5 | * Authors: |
6 | * Anup Patel <anup.patel@wdc.com> |
7 | */ |
8 | |
9 | #include <linux/linkage.h> |
10 | #include <asm/asm.h> |
11 | #include <asm/asm-offsets.h> |
12 | #include <asm/csr.h> |
13 | |
14 | .text |
15 | .altmacro |
16 | .option norelax |
17 | |
/*
 * void __kvm_riscv_switch_to(struct kvm_vcpu_arch *context)
 *
 * World switch from host to guest.  Entered from host C code with A0
 * pointing at the per-VCPU context structure (all KVM_ARCH_* offsets
 * below index into it).
 *
 * Sequence:
 *   1. Save host GPRs into the context (A0 is preserved implicitly via
 *      SSCRATCH; T0-T6 are treated as clobbered by the caller).
 *   2. Swap host/guest SSTATUS, HSTATUS and SCOUNTEREN with csrrw so
 *      the host values are captured in one pass.
 *   3. Point STVEC at .Lkvm_switch_return and stash the context
 *      pointer in SSCRATCH, so the very next guest trap re-enters this
 *      function at the return path below with the context recoverable.
 *   4. Load guest GPRs and sret into the guest.
 *
 * On any guest trap, execution resumes at .Lkvm_switch_return, which
 * undoes the above and returns to the C caller via the saved host RA.
 */
SYM_FUNC_START(__kvm_riscv_switch_to)
/* Save Host GPRs (except A0 and T0-T6) */
REG_S ra, (KVM_ARCH_HOST_RA)(a0)
REG_S sp, (KVM_ARCH_HOST_SP)(a0)
REG_S gp, (KVM_ARCH_HOST_GP)(a0)
REG_S tp, (KVM_ARCH_HOST_TP)(a0)
REG_S s0, (KVM_ARCH_HOST_S0)(a0)
REG_S s1, (KVM_ARCH_HOST_S1)(a0)
REG_S a1, (KVM_ARCH_HOST_A1)(a0)
REG_S a2, (KVM_ARCH_HOST_A2)(a0)
REG_S a3, (KVM_ARCH_HOST_A3)(a0)
REG_S a4, (KVM_ARCH_HOST_A4)(a0)
REG_S a5, (KVM_ARCH_HOST_A5)(a0)
REG_S a6, (KVM_ARCH_HOST_A6)(a0)
REG_S a7, (KVM_ARCH_HOST_A7)(a0)
REG_S s2, (KVM_ARCH_HOST_S2)(a0)
REG_S s3, (KVM_ARCH_HOST_S3)(a0)
REG_S s4, (KVM_ARCH_HOST_S4)(a0)
REG_S s5, (KVM_ARCH_HOST_S5)(a0)
REG_S s6, (KVM_ARCH_HOST_S6)(a0)
REG_S s7, (KVM_ARCH_HOST_S7)(a0)
REG_S s8, (KVM_ARCH_HOST_S8)(a0)
REG_S s9, (KVM_ARCH_HOST_S9)(a0)
REG_S s10, (KVM_ARCH_HOST_S10)(a0)
REG_S s11, (KVM_ARCH_HOST_S11)(a0)

/*
 * Load Guest CSR values into temporaries before touching any CSR, so
 * the loads still run under the unmodified host SSTATUS translation.
 * t4 gets the trap-return label, which becomes the new STVEC below.
 */
REG_L t0, (KVM_ARCH_GUEST_SSTATUS)(a0)
REG_L t1, (KVM_ARCH_GUEST_HSTATUS)(a0)
REG_L t2, (KVM_ARCH_GUEST_SCOUNTEREN)(a0)
la t4, .Lkvm_switch_return
REG_L t5, (KVM_ARCH_GUEST_SEPC)(a0)

/* Save Host and Restore Guest SSTATUS (atomic swap via csrrw) */
csrrw t0, CSR_SSTATUS, t0

/* Save Host and Restore Guest HSTATUS */
csrrw t1, CSR_HSTATUS, t1

/* Save Host and Restore Guest SCOUNTEREN */
csrrw t2, CSR_SCOUNTEREN, t2

/* Save Host STVEC and change it to return path */
csrrw t4, CSR_STVEC, t4

/*
 * Save Host SSCRATCH and change it to the context pointer; the return
 * path below swaps it back out to recover A0 after a guest trap.
 */
csrrw t3, CSR_SSCRATCH, a0

/* Restore Guest SEPC (the PC sret will jump to) */
csrw CSR_SEPC, t5

/* Store Host CSR values captured by the csrrw swaps above */
REG_S t0, (KVM_ARCH_HOST_SSTATUS)(a0)
REG_S t1, (KVM_ARCH_HOST_HSTATUS)(a0)
REG_S t2, (KVM_ARCH_HOST_SCOUNTEREN)(a0)
REG_S t3, (KVM_ARCH_HOST_SSCRATCH)(a0)
REG_S t4, (KVM_ARCH_HOST_STVEC)(a0)

/* Restore Guest GPRs (except A0, which still holds the context ptr) */
REG_L ra, (KVM_ARCH_GUEST_RA)(a0)
REG_L sp, (KVM_ARCH_GUEST_SP)(a0)
REG_L gp, (KVM_ARCH_GUEST_GP)(a0)
REG_L tp, (KVM_ARCH_GUEST_TP)(a0)
REG_L t0, (KVM_ARCH_GUEST_T0)(a0)
REG_L t1, (KVM_ARCH_GUEST_T1)(a0)
REG_L t2, (KVM_ARCH_GUEST_T2)(a0)
REG_L s0, (KVM_ARCH_GUEST_S0)(a0)
REG_L s1, (KVM_ARCH_GUEST_S1)(a0)
REG_L a1, (KVM_ARCH_GUEST_A1)(a0)
REG_L a2, (KVM_ARCH_GUEST_A2)(a0)
REG_L a3, (KVM_ARCH_GUEST_A3)(a0)
REG_L a4, (KVM_ARCH_GUEST_A4)(a0)
REG_L a5, (KVM_ARCH_GUEST_A5)(a0)
REG_L a6, (KVM_ARCH_GUEST_A6)(a0)
REG_L a7, (KVM_ARCH_GUEST_A7)(a0)
REG_L s2, (KVM_ARCH_GUEST_S2)(a0)
REG_L s3, (KVM_ARCH_GUEST_S3)(a0)
REG_L s4, (KVM_ARCH_GUEST_S4)(a0)
REG_L s5, (KVM_ARCH_GUEST_S5)(a0)
REG_L s6, (KVM_ARCH_GUEST_S6)(a0)
REG_L s7, (KVM_ARCH_GUEST_S7)(a0)
REG_L s8, (KVM_ARCH_GUEST_S8)(a0)
REG_L s9, (KVM_ARCH_GUEST_S9)(a0)
REG_L s10, (KVM_ARCH_GUEST_S10)(a0)
REG_L s11, (KVM_ARCH_GUEST_S11)(a0)
REG_L t3, (KVM_ARCH_GUEST_T3)(a0)
REG_L t4, (KVM_ARCH_GUEST_T4)(a0)
REG_L t5, (KVM_ARCH_GUEST_T5)(a0)
REG_L t6, (KVM_ARCH_GUEST_T6)(a0)

/* Restore Guest A0 last — this overwrites the context pointer */
REG_L a0, (KVM_ARCH_GUEST_A0)(a0)

/* Resume Guest */
sret

/*
 * Back to Host.
 *
 * Any trap taken while the guest runs vectors here (STVEC was set
 * above).  All GPRs hold guest values; SSCRATCH holds the context
 * pointer.  The .align 2 keeps the label 4-byte aligned as required
 * for an STVEC target.
 */
.align 2
.Lkvm_switch_return:
/* Swap Guest A0 with SSCRATCH: a0 = context ptr, SSCRATCH = guest a0 */
csrrw a0, CSR_SSCRATCH, a0

/* Save Guest GPRs (except A0, which is parked in SSCRATCH) */
REG_S ra, (KVM_ARCH_GUEST_RA)(a0)
REG_S sp, (KVM_ARCH_GUEST_SP)(a0)
REG_S gp, (KVM_ARCH_GUEST_GP)(a0)
REG_S tp, (KVM_ARCH_GUEST_TP)(a0)
REG_S t0, (KVM_ARCH_GUEST_T0)(a0)
REG_S t1, (KVM_ARCH_GUEST_T1)(a0)
REG_S t2, (KVM_ARCH_GUEST_T2)(a0)
REG_S s0, (KVM_ARCH_GUEST_S0)(a0)
REG_S s1, (KVM_ARCH_GUEST_S1)(a0)
REG_S a1, (KVM_ARCH_GUEST_A1)(a0)
REG_S a2, (KVM_ARCH_GUEST_A2)(a0)
REG_S a3, (KVM_ARCH_GUEST_A3)(a0)
REG_S a4, (KVM_ARCH_GUEST_A4)(a0)
REG_S a5, (KVM_ARCH_GUEST_A5)(a0)
REG_S a6, (KVM_ARCH_GUEST_A6)(a0)
REG_S a7, (KVM_ARCH_GUEST_A7)(a0)
REG_S s2, (KVM_ARCH_GUEST_S2)(a0)
REG_S s3, (KVM_ARCH_GUEST_S3)(a0)
REG_S s4, (KVM_ARCH_GUEST_S4)(a0)
REG_S s5, (KVM_ARCH_GUEST_S5)(a0)
REG_S s6, (KVM_ARCH_GUEST_S6)(a0)
REG_S s7, (KVM_ARCH_GUEST_S7)(a0)
REG_S s8, (KVM_ARCH_GUEST_S8)(a0)
REG_S s9, (KVM_ARCH_GUEST_S9)(a0)
REG_S s10, (KVM_ARCH_GUEST_S10)(a0)
REG_S s11, (KVM_ARCH_GUEST_S11)(a0)
REG_S t3, (KVM_ARCH_GUEST_T3)(a0)
REG_S t4, (KVM_ARCH_GUEST_T4)(a0)
REG_S t5, (KVM_ARCH_GUEST_T5)(a0)
REG_S t6, (KVM_ARCH_GUEST_T6)(a0)

/* Load Host CSR values saved during entry */
REG_L t1, (KVM_ARCH_HOST_STVEC)(a0)
REG_L t2, (KVM_ARCH_HOST_SSCRATCH)(a0)
REG_L t3, (KVM_ARCH_HOST_SCOUNTEREN)(a0)
REG_L t4, (KVM_ARCH_HOST_HSTATUS)(a0)
REG_L t5, (KVM_ARCH_HOST_SSTATUS)(a0)

/* Save Guest SEPC (guest PC at the time of the trap) */
csrr t0, CSR_SEPC

/* Save Guest A0 (parked in SSCRATCH above) and Restore Host SSCRATCH */
csrrw t2, CSR_SSCRATCH, t2

/* Restore Host STVEC */
csrw CSR_STVEC, t1

/* Save Guest and Restore Host SCOUNTEREN */
csrrw t3, CSR_SCOUNTEREN, t3

/* Save Guest and Restore Host HSTATUS */
csrrw t4, CSR_HSTATUS, t4

/* Save Guest and Restore Host SSTATUS */
csrrw t5, CSR_SSTATUS, t5

/* Store Guest CSR values captured by the swaps above */
REG_S t0, (KVM_ARCH_GUEST_SEPC)(a0)
REG_S t2, (KVM_ARCH_GUEST_A0)(a0)
REG_S t3, (KVM_ARCH_GUEST_SCOUNTEREN)(a0)
REG_S t4, (KVM_ARCH_GUEST_HSTATUS)(a0)
REG_S t5, (KVM_ARCH_GUEST_SSTATUS)(a0)

/* Restore Host GPRs (except A0 and T0-T6) */
REG_L ra, (KVM_ARCH_HOST_RA)(a0)
REG_L sp, (KVM_ARCH_HOST_SP)(a0)
REG_L gp, (KVM_ARCH_HOST_GP)(a0)
REG_L tp, (KVM_ARCH_HOST_TP)(a0)
REG_L s0, (KVM_ARCH_HOST_S0)(a0)
REG_L s1, (KVM_ARCH_HOST_S1)(a0)
REG_L a1, (KVM_ARCH_HOST_A1)(a0)
REG_L a2, (KVM_ARCH_HOST_A2)(a0)
REG_L a3, (KVM_ARCH_HOST_A3)(a0)
REG_L a4, (KVM_ARCH_HOST_A4)(a0)
REG_L a5, (KVM_ARCH_HOST_A5)(a0)
REG_L a6, (KVM_ARCH_HOST_A6)(a0)
REG_L a7, (KVM_ARCH_HOST_A7)(a0)
REG_L s2, (KVM_ARCH_HOST_S2)(a0)
REG_L s3, (KVM_ARCH_HOST_S3)(a0)
REG_L s4, (KVM_ARCH_HOST_S4)(a0)
REG_L s5, (KVM_ARCH_HOST_S5)(a0)
REG_L s6, (KVM_ARCH_HOST_S6)(a0)
REG_L s7, (KVM_ARCH_HOST_S7)(a0)
REG_L s8, (KVM_ARCH_HOST_S8)(a0)
REG_L s9, (KVM_ARCH_HOST_S9)(a0)
REG_L s10, (KVM_ARCH_HOST_S10)(a0)
REG_L s11, (KVM_ARCH_HOST_S11)(a0)

/* Return to C code */
ret
SYM_FUNC_END(__kvm_riscv_switch_to)
212 | |
/*
 * Trap handler installed (presumably as STVEC target — confirm against
 * the C callers) while the host performs unprivileged accesses to guest
 * memory.  On a fault it records the trap details and skips past the
 * faulting instruction so the host-side access sequence can continue.
 *
 * In:  A0 = pointer to a trap-info structure (KVM_ARCH_TRAP_* offsets)
 * Out: trap details stored at A0; SEPC advanced past the fault
 * Clobbers: A1
 */
SYM_CODE_START(__kvm_riscv_unpriv_trap)
/*
 * We assume that faulting unpriv load/store instruction is
 * 4-byte long and blindly increment SEPC by 4.
 *
 * The trap details will be saved at address pointed by 'A0'
 * register and we use 'A1' register as temporary.
 */
csrr a1, CSR_SEPC
REG_S a1, (KVM_ARCH_TRAP_SEPC)(a0)
addi a1, a1, 4
csrw CSR_SEPC, a1
csrr a1, CSR_SCAUSE
REG_S a1, (KVM_ARCH_TRAP_SCAUSE)(a0)
csrr a1, CSR_STVAL
REG_S a1, (KVM_ARCH_TRAP_STVAL)(a0)
/* HTVAL/HTINST are hypervisor-extension CSRs with extra fault detail */
csrr a1, CSR_HTVAL
REG_S a1, (KVM_ARCH_TRAP_HTVAL)(a0)
csrr a1, CSR_HTINST
REG_S a1, (KVM_ARCH_TRAP_HTINST)(a0)
/* Resume after the skipped instruction */
sret
SYM_CODE_END(__kvm_riscv_unpriv_trap)
235 | |
236 | #ifdef CONFIG_FPU |
/*
 * Save single-precision FP state (F extension) into the area A0 points
 * at (KVM_ARCH_FP_F_* offsets): f0-f31 plus fcsr.
 *
 * SSTATUS.FS is forced on (via SR_FS) so the FP instructions below do
 * not trap, then the caller's original SSTATUS is restored on exit.
 * Clobbers: T0 (fcsr), T1 (SR_FS mask), T2 (saved SSTATUS).
 */
SYM_FUNC_START(__kvm_riscv_fp_f_save)
csrr t2, CSR_SSTATUS
li t1, SR_FS
csrs CSR_SSTATUS, t1
/* Read fcsr first; the register stores below do not modify it */
frcsr t0
fsw f0, KVM_ARCH_FP_F_F0(a0)
fsw f1, KVM_ARCH_FP_F_F1(a0)
fsw f2, KVM_ARCH_FP_F_F2(a0)
fsw f3, KVM_ARCH_FP_F_F3(a0)
fsw f4, KVM_ARCH_FP_F_F4(a0)
fsw f5, KVM_ARCH_FP_F_F5(a0)
fsw f6, KVM_ARCH_FP_F_F6(a0)
fsw f7, KVM_ARCH_FP_F_F7(a0)
fsw f8, KVM_ARCH_FP_F_F8(a0)
fsw f9, KVM_ARCH_FP_F_F9(a0)
fsw f10, KVM_ARCH_FP_F_F10(a0)
fsw f11, KVM_ARCH_FP_F_F11(a0)
fsw f12, KVM_ARCH_FP_F_F12(a0)
fsw f13, KVM_ARCH_FP_F_F13(a0)
fsw f14, KVM_ARCH_FP_F_F14(a0)
fsw f15, KVM_ARCH_FP_F_F15(a0)
fsw f16, KVM_ARCH_FP_F_F16(a0)
fsw f17, KVM_ARCH_FP_F_F17(a0)
fsw f18, KVM_ARCH_FP_F_F18(a0)
fsw f19, KVM_ARCH_FP_F_F19(a0)
fsw f20, KVM_ARCH_FP_F_F20(a0)
fsw f21, KVM_ARCH_FP_F_F21(a0)
fsw f22, KVM_ARCH_FP_F_F22(a0)
fsw f23, KVM_ARCH_FP_F_F23(a0)
fsw f24, KVM_ARCH_FP_F_F24(a0)
fsw f25, KVM_ARCH_FP_F_F25(a0)
fsw f26, KVM_ARCH_FP_F_F26(a0)
fsw f27, KVM_ARCH_FP_F_F27(a0)
fsw f28, KVM_ARCH_FP_F_F28(a0)
fsw f29, KVM_ARCH_FP_F_F29(a0)
fsw f30, KVM_ARCH_FP_F_F30(a0)
fsw f31, KVM_ARCH_FP_F_F31(a0)
sw t0, KVM_ARCH_FP_F_FCSR(a0)
/* Restore the caller's SSTATUS (including its original FS state) */
csrw CSR_SSTATUS, t2
ret
SYM_FUNC_END(__kvm_riscv_fp_f_save)
278 | |
/*
 * Save double-precision FP state (D extension) into the area A0 points
 * at (KVM_ARCH_FP_D_* offsets): f0-f31 plus fcsr.
 *
 * Same SSTATUS.FS enable/restore pattern as __kvm_riscv_fp_f_save,
 * using 64-bit fsd stores instead of fsw.
 * Clobbers: T0 (fcsr), T1 (SR_FS mask), T2 (saved SSTATUS).
 */
SYM_FUNC_START(__kvm_riscv_fp_d_save)
csrr t2, CSR_SSTATUS
li t1, SR_FS
csrs CSR_SSTATUS, t1
/* Read fcsr first; the register stores below do not modify it */
frcsr t0
fsd f0, KVM_ARCH_FP_D_F0(a0)
fsd f1, KVM_ARCH_FP_D_F1(a0)
fsd f2, KVM_ARCH_FP_D_F2(a0)
fsd f3, KVM_ARCH_FP_D_F3(a0)
fsd f4, KVM_ARCH_FP_D_F4(a0)
fsd f5, KVM_ARCH_FP_D_F5(a0)
fsd f6, KVM_ARCH_FP_D_F6(a0)
fsd f7, KVM_ARCH_FP_D_F7(a0)
fsd f8, KVM_ARCH_FP_D_F8(a0)
fsd f9, KVM_ARCH_FP_D_F9(a0)
fsd f10, KVM_ARCH_FP_D_F10(a0)
fsd f11, KVM_ARCH_FP_D_F11(a0)
fsd f12, KVM_ARCH_FP_D_F12(a0)
fsd f13, KVM_ARCH_FP_D_F13(a0)
fsd f14, KVM_ARCH_FP_D_F14(a0)
fsd f15, KVM_ARCH_FP_D_F15(a0)
fsd f16, KVM_ARCH_FP_D_F16(a0)
fsd f17, KVM_ARCH_FP_D_F17(a0)
fsd f18, KVM_ARCH_FP_D_F18(a0)
fsd f19, KVM_ARCH_FP_D_F19(a0)
fsd f20, KVM_ARCH_FP_D_F20(a0)
fsd f21, KVM_ARCH_FP_D_F21(a0)
fsd f22, KVM_ARCH_FP_D_F22(a0)
fsd f23, KVM_ARCH_FP_D_F23(a0)
fsd f24, KVM_ARCH_FP_D_F24(a0)
fsd f25, KVM_ARCH_FP_D_F25(a0)
fsd f26, KVM_ARCH_FP_D_F26(a0)
fsd f27, KVM_ARCH_FP_D_F27(a0)
fsd f28, KVM_ARCH_FP_D_F28(a0)
fsd f29, KVM_ARCH_FP_D_F29(a0)
fsd f30, KVM_ARCH_FP_D_F30(a0)
fsd f31, KVM_ARCH_FP_D_F31(a0)
sw t0, KVM_ARCH_FP_D_FCSR(a0)
/* Restore the caller's SSTATUS (including its original FS state) */
csrw CSR_SSTATUS, t2
ret
SYM_FUNC_END(__kvm_riscv_fp_d_save)
320 | |
/*
 * Restore single-precision FP state (F extension) from the area A0
 * points at (KVM_ARCH_FP_F_* offsets): f0-f31 plus fcsr.
 *
 * SSTATUS.FS is forced on so the FP instructions do not trap; the
 * caller's original SSTATUS is restored on exit.
 * Clobbers: T0 (fcsr), T1 (SR_FS mask), T2 (saved SSTATUS).
 */
SYM_FUNC_START(__kvm_riscv_fp_f_restore)
csrr t2, CSR_SSTATUS
li t1, SR_FS
/* Load the saved fcsr before enabling FS; written last via fscsr */
lw t0, KVM_ARCH_FP_F_FCSR(a0)
csrs CSR_SSTATUS, t1
flw f0, KVM_ARCH_FP_F_F0(a0)
flw f1, KVM_ARCH_FP_F_F1(a0)
flw f2, KVM_ARCH_FP_F_F2(a0)
flw f3, KVM_ARCH_FP_F_F3(a0)
flw f4, KVM_ARCH_FP_F_F4(a0)
flw f5, KVM_ARCH_FP_F_F5(a0)
flw f6, KVM_ARCH_FP_F_F6(a0)
flw f7, KVM_ARCH_FP_F_F7(a0)
flw f8, KVM_ARCH_FP_F_F8(a0)
flw f9, KVM_ARCH_FP_F_F9(a0)
flw f10, KVM_ARCH_FP_F_F10(a0)
flw f11, KVM_ARCH_FP_F_F11(a0)
flw f12, KVM_ARCH_FP_F_F12(a0)
flw f13, KVM_ARCH_FP_F_F13(a0)
flw f14, KVM_ARCH_FP_F_F14(a0)
flw f15, KVM_ARCH_FP_F_F15(a0)
flw f16, KVM_ARCH_FP_F_F16(a0)
flw f17, KVM_ARCH_FP_F_F17(a0)
flw f18, KVM_ARCH_FP_F_F18(a0)
flw f19, KVM_ARCH_FP_F_F19(a0)
flw f20, KVM_ARCH_FP_F_F20(a0)
flw f21, KVM_ARCH_FP_F_F21(a0)
flw f22, KVM_ARCH_FP_F_F22(a0)
flw f23, KVM_ARCH_FP_F_F23(a0)
flw f24, KVM_ARCH_FP_F_F24(a0)
flw f25, KVM_ARCH_FP_F_F25(a0)
flw f26, KVM_ARCH_FP_F_F26(a0)
flw f27, KVM_ARCH_FP_F_F27(a0)
flw f28, KVM_ARCH_FP_F_F28(a0)
flw f29, KVM_ARCH_FP_F_F29(a0)
flw f30, KVM_ARCH_FP_F_F30(a0)
flw f31, KVM_ARCH_FP_F_F31(a0)
fscsr t0
/* Restore the caller's SSTATUS (including its original FS state) */
csrw CSR_SSTATUS, t2
ret
SYM_FUNC_END(__kvm_riscv_fp_f_restore)
362 | |
/*
 * Restore double-precision FP state (D extension) from the area A0
 * points at (KVM_ARCH_FP_D_* offsets): f0-f31 plus fcsr.
 *
 * Same SSTATUS.FS enable/restore pattern as __kvm_riscv_fp_f_restore,
 * using 64-bit fld loads instead of flw.
 * Clobbers: T0 (fcsr), T1 (SR_FS mask), T2 (saved SSTATUS).
 */
SYM_FUNC_START(__kvm_riscv_fp_d_restore)
csrr t2, CSR_SSTATUS
li t1, SR_FS
/* Load the saved fcsr before enabling FS; written last via fscsr */
lw t0, KVM_ARCH_FP_D_FCSR(a0)
csrs CSR_SSTATUS, t1
fld f0, KVM_ARCH_FP_D_F0(a0)
fld f1, KVM_ARCH_FP_D_F1(a0)
fld f2, KVM_ARCH_FP_D_F2(a0)
fld f3, KVM_ARCH_FP_D_F3(a0)
fld f4, KVM_ARCH_FP_D_F4(a0)
fld f5, KVM_ARCH_FP_D_F5(a0)
fld f6, KVM_ARCH_FP_D_F6(a0)
fld f7, KVM_ARCH_FP_D_F7(a0)
fld f8, KVM_ARCH_FP_D_F8(a0)
fld f9, KVM_ARCH_FP_D_F9(a0)
fld f10, KVM_ARCH_FP_D_F10(a0)
fld f11, KVM_ARCH_FP_D_F11(a0)
fld f12, KVM_ARCH_FP_D_F12(a0)
fld f13, KVM_ARCH_FP_D_F13(a0)
fld f14, KVM_ARCH_FP_D_F14(a0)
fld f15, KVM_ARCH_FP_D_F15(a0)
fld f16, KVM_ARCH_FP_D_F16(a0)
fld f17, KVM_ARCH_FP_D_F17(a0)
fld f18, KVM_ARCH_FP_D_F18(a0)
fld f19, KVM_ARCH_FP_D_F19(a0)
fld f20, KVM_ARCH_FP_D_F20(a0)
fld f21, KVM_ARCH_FP_D_F21(a0)
fld f22, KVM_ARCH_FP_D_F22(a0)
fld f23, KVM_ARCH_FP_D_F23(a0)
fld f24, KVM_ARCH_FP_D_F24(a0)
fld f25, KVM_ARCH_FP_D_F25(a0)
fld f26, KVM_ARCH_FP_D_F26(a0)
fld f27, KVM_ARCH_FP_D_F27(a0)
fld f28, KVM_ARCH_FP_D_F28(a0)
fld f29, KVM_ARCH_FP_D_F29(a0)
fld f30, KVM_ARCH_FP_D_F30(a0)
fld f31, KVM_ARCH_FP_D_F31(a0)
fscsr t0
/* Restore the caller's SSTATUS (including its original FS state) */
csrw CSR_SSTATUS, t2
ret
SYM_FUNC_END(__kvm_riscv_fp_d_restore)
404 | #endif |
405 | |