1 | /* |
2 | * |
3 | * This file is subject to the terms and conditions of the GNU General Public |
4 | * License. See the file "COPYING" in the main directory of this archive |
5 | * for more details. |
6 | * |
7 | * Copyright (C) 2000 Hewlett Packard (Paul Bame bame@puffin.external.hp.com) |
8 | * |
9 | */ |
10 | |
11 | #include <asm/pdc.h> |
12 | #include <asm/psw.h> |
13 | #include <asm/assembly.h> |
14 | #include <asm/asm-offsets.h> |
15 | |
16 | #include <linux/linkage.h> |
17 | |
	/* 8 kB stack used while executing in real (physical) mode.
	 * The 32-bit and 64-bit call paths below share the same area
	 * (real_stack and real64_stack label the same storage). */
	.export real_stack
	.export real64_stack
	__PAGE_ALIGNED_BSS
real_stack:
real64_stack:
	.block 8192

	/* Scratch save area for the N_SAVED_REGS control registers that
	 * save_control_regs/restore_control_regs push and pop around a
	 * firmware call.  save_cr_end marks one slot past the last entry
	 * (POP_CR pre-decrements, so restores start from the end). */
#define N_SAVED_REGS 9
	.section .bss
save_cr_space:
	.block REG_SZ * N_SAVED_REGS
save_cr_end:
30 | |
31 | |
32 | /************************ 32-bit real-mode calls ***********************/ |
33 | /* This can be called in both narrow and wide kernels */ |
34 | |
35 | .text |
36 | |
37 | /* unsigned long real32_call_asm(unsigned int *sp, |
38 | * unsigned int *arg0p, |
39 | * unsigned int iodc_fn) |
40 | * sp is value of stack pointer to adopt before calling PDC (virt) |
41 | * arg0p points to where saved arg values may be found |
42 | * iodc_fn is the IODC function to call |
43 | */ |
44 | |
ENTRY_CFI(real32_call_asm)
	/* Trampoline: switch to real mode, call a 32-bit (narrow)
	 * firmware entry point, then return to virtual mode.
	 * In:  arg0 = real-mode stack pointer to adopt (virt address)
	 *      arg1 = pointer into the saved-argument area
	 *      arg2 = address of the IODC/PDC function to call
	 * Out: r28  = callee's return value
	 * Uses r31 to hold the callee address; r1/r2/r26/r28 are
	 * clobbered by the helper routines called below. */
	STREG %rp, -RP_OFFSET(%sp)	/* save RP */
#ifdef CONFIG_64BIT
	callee_save
	ldo 2*REG_SZ(%sp), %sp	/* room for a couple more saves */
	STREG %r27, -1*REG_SZ(%sp)
	STREG %r29, -2*REG_SZ(%sp)
#endif
	STREG %sp, -REG_SZ(%arg0)	/* save SP on real-mode stack */
	copy %arg0, %sp	/* adopt the real-mode SP */

	/* save iodc_fn across the arg-register reloads below */
	copy %arg2, %r31

	/* load up the arg registers from the saved arg area */
	/* 32-bit calling convention passes first 4 args in registers */
	/* NOTE: saved args live at offsets 0, -4, -8, -12 from arg1 */
	ldw 0(%arg1), %arg0	/* note overwriting arg0 */
	ldw -8(%arg1), %arg2
	ldw -12(%arg1), %arg3
	ldw -4(%arg1), %arg1	/* obviously must do this one last! */

	tophys_r1  %sp		/* convert adopted SP to a physical address */

	b,l rfi_virt2real,%r2
	nop

	b,l save_control_regs,%r2	/* modifies r1, r2, r28 */
	nop

#ifdef CONFIG_64BIT
	rsm PSW_SM_W, %r0		/* go narrow: 32-bit firmware entry */
#endif

	/* call the firmware routine; it returns to ric_ret via r2 */
	load32 PA(ric_ret), %r2
	bv 0(%r31)
	nop
ric_ret:
#ifdef CONFIG_64BIT
	ssm PSW_SM_W, %r0		/* go wide again */
#endif
	/* restore CRs before going virtual in case we page fault */
	b,l restore_control_regs, %r2	/* modifies r1, r2, r26 */
	nop

	b,l rfi_real2virt,%r2
	nop

	tovirt_r1 %sp			/* SP back to a virtual address */
	LDREG -REG_SZ(%sp), %sp	/* restore SP */
#ifdef CONFIG_64BIT
	/* undo the two extra saves and the ldo from the prologue */
	LDREG -1*REG_SZ(%sp), %r27
	LDREG -2*REG_SZ(%sp), %r29
	ldo -2*REG_SZ(%sp), %sp
	callee_rest
#endif
	LDREG -RP_OFFSET(%sp), %rp	/* restore RP */
	bv 0(%rp)
	nop
ENDPROC_CFI(real32_call_asm)
104 | |
105 | |
/* Save/restore one control register via %r1.  PUSH_CR stores with
 * post-increment (",ma" with positive displacement) and POP_CR loads
 * with pre-decrement (",mb" with negative displacement), so the pops
 * in restore_control_regs must occur in exactly the reverse order of
 * the pushes in save_control_regs.  Both clobber %r1. */
# define PUSH_CR(r, where) mfctl r, %r1 ! STREG,ma %r1, REG_SZ(where)
# define POP_CR(r, where) LDREG,mb -REG_SZ(where), %r1 ! mtctl %r1, r
108 | |
	.text
/* Save cr24-cr31 and cr15 into save_cr_space (physical address).
 * Must be called while already in real mode (uses PA()).
 * In:  r2 = return address.  Clobbers r1, r28.
 * The push order here defines the layout restore_control_regs
 * unwinds in reverse; keep the two lists mirror images. */
ENTRY_CFI(save_control_regs)
	load32 PA(save_cr_space), %r28
	PUSH_CR(%cr24, %r28)
	PUSH_CR(%cr25, %r28)
	PUSH_CR(%cr26, %r28)
	PUSH_CR(%cr27, %r28)
	PUSH_CR(%cr28, %r28)
	PUSH_CR(%cr29, %r28)
	PUSH_CR(%cr30, %r28)
	PUSH_CR(%cr31, %r28)
	PUSH_CR(%cr15, %r28)
	bv 0(%r2)
	nop
ENDPROC_CFI(save_control_regs)
124 | |
/* Restore the control registers saved by save_control_regs.
 * Walks down from save_cr_end (POP_CR pre-decrements), so the
 * pop order is the exact reverse of the push order above.
 * In:  r2 = return address.  Clobbers r1, r26. */
ENTRY_CFI(restore_control_regs)
	load32 PA(save_cr_end), %r26
	POP_CR(%cr15, %r26)
	POP_CR(%cr31, %r26)
	POP_CR(%cr30, %r26)
	POP_CR(%cr29, %r26)
	POP_CR(%cr28, %r26)
	POP_CR(%cr27, %r26)
	POP_CR(%cr26, %r26)
	POP_CR(%cr25, %r26)
	POP_CR(%cr24, %r26)
	bv 0(%r2)
	nop
ENDPROC_CFI(restore_control_regs)
139 | |
140 | /* rfi_virt2real() and rfi_real2virt() could perhaps be adapted for |
141 | * more general-purpose use by the several places which need RFIs |
142 | */ |
.text
	.align 128
/* Switch the CPU from virtual to real (physical) addressing mode.
 * In:  r2 = return address (virtual); returned-to as a physical
 *      address after tophys_r1 below.  Clobbers r1.
 * Mechanism: disable interrupts and the Q bit, load the interruption
 * instruction address queue (IIASQ/IIAOQ) with the physical address
 * of rfi_v2r_1, put the real-mode PSW into cr22 (IPSW), then rfi
 * to "return" into real mode at rfi_v2r_1. */
ENTRY_CFI(rfi_virt2real)
#if !defined(BOOTLOADER)
	/* switch to real mode... */
	rsm PSW_SM_I,%r0	/* disable interrupts */
	load32 PA(rfi_v2r_1), %r1
	/* NOTE(review): nop padding around the queue manipulation —
	 * presumably required spacing for safe rsm/rfi sequencing;
	 * do not remove without checking the PA-RISC 2.0 arch rules. */
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q,%r0	/* disable Q & I bits to load iia queue */
	mtctl %r0, %cr17	/* Clear IIASQ tail */
	mtctl %r0, %cr17	/* Clear IIASQ head */
	mtctl %r1, %cr18	/* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18	/* IIAOQ tail */
	load32 REAL_MODE_PSW, %r1
	mtctl %r1, %cr22	/* IPSW: PSW to install on rfi */
	rfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
rfi_v2r_1:
	tophys_r1 %r2		/* caller's return address -> physical */
#endif /* !defined(BOOTLOADER) */
	bv 0(%r2)
	nop
ENDPROC_CFI(rfi_virt2real)
180 | |
.text
	.align 128
/* Switch the CPU from real (physical) back to virtual addressing.
 * Mirror image of rfi_virt2real: loads the IIA queue with the
 * *virtual* address of rfi_r2v_1 and installs KERNEL_PSW via cr22.
 * In:  r2 = return address (physical); converted back to virtual
 *      by tovirt_r1 below.  Clobbers r1. */
ENTRY_CFI(rfi_real2virt)
#if !defined(BOOTLOADER)
	rsm PSW_SM_I,%r0	/* disable interrupts */
	load32 (rfi_r2v_1), %r1	/* virtual target, no PA() here */
	/* NOTE(review): nop padding — see rfi_virt2real */
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q,%r0	/* disable Q bit to load iia queue */
	mtctl %r0, %cr17	/* Clear IIASQ tail */
	mtctl %r0, %cr17	/* Clear IIASQ head */
	mtctl %r1, %cr18	/* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18	/* IIAOQ tail */
	load32 KERNEL_PSW, %r1
	mtctl %r1, %cr22	/* IPSW: PSW to install on rfi */
	rfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
rfi_r2v_1:
	tovirt_r1 %r2		/* caller's return address -> virtual */
#endif /* !defined(BOOTLOADER) */
	bv 0(%r2)
	nop
ENDPROC_CFI(rfi_real2virt)
217 | |
218 | #ifdef CONFIG_64BIT |
219 | |
220 | /************************ 64-bit real-mode calls ***********************/ |
221 | /* This is only usable in wide kernels right now and will probably stay so */ |
222 | .text |
223 | /* unsigned long real64_call_asm(unsigned long *sp, |
224 | * unsigned long *arg0p, |
225 | * unsigned long fn) |
226 | * sp is value of stack pointer to adopt before calling PDC (virt) |
227 | * arg0p points to where saved arg values may be found |
 * fn is the address of the (firmware) function to call
229 | */ |
ENTRY_CFI(real64_call_asm)
	/* 64-bit (wide) counterpart of real32_call_asm.
	 * In:  arg0 = real-mode stack pointer to adopt (virt address)
	 *      arg1 = pointer to saved-argument area (8 doublewords)
	 *      arg2 = address of the function to call
	 * Out: r28  = callee's return value
	 * Simpler prologue than the 32-bit path: only rp/sp are saved,
	 * at fixed offsets below the adopted stack pointer. */
	std %rp, -0x10(%sp)	/* save RP */
	std %sp, -8(%arg0)	/* save SP on real-mode stack */
	copy %arg0, %sp	/* adopt the real-mode SP */

	/* save fn across the arg-register reloads below */
	copy %arg2, %r31

	/* load up the arg registers from the saved arg area */
	/* 64-bit calling convention passes the first 8 args in
	 * registers: arg0-arg3 then r22, r21, r20, r19 */
	ldd 0*REG_SZ(%arg1), %arg0	/* note overwriting arg0 */
	ldd 2*REG_SZ(%arg1), %arg2
	ldd 3*REG_SZ(%arg1), %arg3
	ldd 4*REG_SZ(%arg1), %r22
	ldd 5*REG_SZ(%arg1), %r21
	ldd 6*REG_SZ(%arg1), %r20
	ldd 7*REG_SZ(%arg1), %r19
	ldd 1*REG_SZ(%arg1), %arg1	/* do this one last! */

	/* set up real-mode stack and real-mode ap */
	tophys_r1 %sp
	ldo -16(%sp), %r29	/* Reference param save area */

	b,l rfi_virt2real,%r2
	nop

	b,l save_control_regs,%r2	/* modifies r1, r2, r28 */
	nop

	/* call the firmware routine; it returns to r64_ret via r2 */
	load32 PA(r64_ret), %r2
	bv 0(%r31)
	nop
r64_ret:
	/* restore CRs before going virtual in case we page fault */
	b,l restore_control_regs, %r2	/* modifies r1, r2, r26 */
	nop

	b,l rfi_real2virt,%r2
	nop

	tovirt_r1 %sp			/* SP back to a virtual address */
	ldd -8(%sp), %sp	/* restore SP */
	ldd -0x10(%sp), %rp	/* restore RP */
	bv 0(%rp)
	nop
ENDPROC_CFI(real64_call_asm)
276 | |
277 | #endif |
278 | |
.text
/* http://lists.parisc-linux.org/hypermail/parisc-linux/10916.html
** GCC 3.3 and later has a new function in libgcc.a for
** comparing function pointers.
*/
/* Identity implementation: returns its argument unchanged
 * (r26 -> r28).  The kernel does not use function descriptors the
 * way userspace does, so no canonicalization is needed here.
 * The copy sits in the branch delay slot, so it executes before
 * the return branch completes. */
ENTRY_CFI(__canonicalize_funcptr_for_compare)
#ifdef CONFIG_64BIT
	bve (%r2)
#else
	bv %r0(%r2)
#endif
	copy %r26,%r28		/* delay slot: retval = arg */
ENDPROC_CFI(__canonicalize_funcptr_for_compare)
292 | |
293 | |