Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* $Id: entry.S,v 1.144 2002/02/09 19:49:30 davem Exp $ |
2 | * arch/sparc64/kernel/entry.S: Sparc64 trap low-level entry points. | |
3 | * | |
4 | * Copyright (C) 1995,1997 David S. Miller (davem@caip.rutgers.edu) | |
5 | * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) | |
6 | * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx) | |
7 | * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | |
8 | */ | |
9 | ||
10 | #include <linux/config.h> | |
11 | #include <linux/errno.h> | |
12 | ||
13 | #include <asm/head.h> | |
14 | #include <asm/asi.h> | |
15 | #include <asm/smp.h> | |
16 | #include <asm/ptrace.h> | |
17 | #include <asm/page.h> | |
18 | #include <asm/signal.h> | |
19 | #include <asm/pgtable.h> | |
20 | #include <asm/processor.h> | |
21 | #include <asm/visasm.h> | |
22 | #include <asm/estate.h> | |
23 | #include <asm/auxio.h> | |
6c52a96e | 24 | #include <asm/sfafsr.h> |
1da177e4 | 25 | |
1da177e4 LT |
26 | #define curptr g6 |
27 | ||
44158637 | 28 | #define NR_SYSCALLS 299 /* Each OS is different... */ |
1da177e4 LT |
29 | |
30 | .text | |
31 | .align 32 | |
32 | ||
1da177e4 LT |
/* do_fpdis: "FP disabled" trap entry.
 * Re-enables the FPU (wr %g0, FPRS_FEF, %fprs) and reloads saved FP state
 * from thread_info (%g6).  TI_FPSAVED selects which halves hold saved
 * state: neither (label 1 below both FPRS tests), lower only, upper only,
 * or both (label 3).  Saved halves are restored with block loads
 * (ldda ASI_BLK_S) from TI_FPREGS after temporarily switching
 * SECONDARY_CONTEXT to the kernel context (sparc64_kern_sec_context);
 * the half with no saved state is zero-filled by the faddd/fmuld runs
 * seeded from fzero'd registers.  Ends by restoring %gsr and %fsr,
 * setting TSTATE_PEF in %tstate, and retrying the trapping instruction.
 * NOTE(review): if the FPU already appears enabled (TSTATE_PEF and
 * FPRS_FEF both set) this is taken as the Cheetah DCR_IFPOE case and we
 * bounce through etrap to rtrap_clr_l6 instead. */
33 | /* This is trivial with the new code... */
34 | .globl do_fpdis | |
35 | do_fpdis: | |
ba639933 | 36 | sethi %hi(TSTATE_PEF), %g4 |
1da177e4 LT |
37 | rdpr %tstate, %g5 |
38 | andcc %g5, %g4, %g0 | |
39 | be,pt %xcc, 1f | |
40 | nop | |
41 | rd %fprs, %g5 | |
42 | andcc %g5, FPRS_FEF, %g0 | |
43 | be,pt %xcc, 1f | |
44 | nop | |
45 | ||
46 | /* Legal state when DCR_IFPOE is set in Cheetah %dcr. */ | |
47 | sethi %hi(109f), %g7 | |
48 | ba,pt %xcc, etrap | |
49 | 109: or %g7, %lo(109b), %g7 | |
50 | add %g0, %g0, %g0 | |
51 | ba,a,pt %xcc, rtrap_clr_l6 | |
52 | ||
ba639933 DM |
53 | 1: ldub [%g6 + TI_FPSAVED], %g5 |
54 | wr %g0, FPRS_FEF, %fprs | |
55 | andcc %g5, FPRS_FEF, %g0 | |
56 | be,a,pt %icc, 1f | |
57 | clr %g7 | |
58 | ldx [%g6 + TI_GSR], %g7 | |
/* %g5 = saved FPRS bits, %g7 = saved GSR (or 0 if nothing saved). */
59 | 1: andcc %g5, FPRS_DL, %g0 | |
60 | bne,pn %icc, 2f | |
61 | fzero %f0 | |
62 | andcc %g5, FPRS_DU, %g0 | |
63 | bne,pn %icc, 1f | |
64 | fzero %f2 | |
/* Neither half saved: synthesize zeros into all FP registers. */
1da177e4 LT |
65 | faddd %f0, %f2, %f4 |
66 | fmuld %f0, %f2, %f6 | |
67 | faddd %f0, %f2, %f8 | |
68 | fmuld %f0, %f2, %f10 | |
69 | faddd %f0, %f2, %f12 | |
70 | fmuld %f0, %f2, %f14 | |
71 | faddd %f0, %f2, %f16 | |
72 | fmuld %f0, %f2, %f18 | |
73 | faddd %f0, %f2, %f20 | |
74 | fmuld %f0, %f2, %f22 | |
75 | faddd %f0, %f2, %f24 | |
76 | fmuld %f0, %f2, %f26 | |
77 | faddd %f0, %f2, %f28 | |
78 | fmuld %f0, %f2, %f30 | |
79 | faddd %f0, %f2, %f32 | |
80 | fmuld %f0, %f2, %f34 | |
81 | faddd %f0, %f2, %f36 | |
82 | fmuld %f0, %f2, %f38 | |
83 | faddd %f0, %f2, %f40 | |
84 | fmuld %f0, %f2, %f42 | |
85 | faddd %f0, %f2, %f44 | |
86 | fmuld %f0, %f2, %f46 | |
87 | faddd %f0, %f2, %f48 | |
88 | fmuld %f0, %f2, %f50 | |
89 | faddd %f0, %f2, %f52 | |
90 | fmuld %f0, %f2, %f54 | |
91 | faddd %f0, %f2, %f56 | |
92 | fmuld %f0, %f2, %f58 | |
93 | b,pt %xcc, fpdis_exit2 | |
94 | faddd %f0, %f2, %f60 | |
/* Upper half saved only: block-load %f32-%f62, zero-fill %f0-%f30. */
95 | 1: mov SECONDARY_CONTEXT, %g3 | |
96 | add %g6, TI_FPREGS + 0x80, %g1 | |
97 | faddd %f0, %f2, %f4 | |
98 | fmuld %f0, %f2, %f6 | |
99 | ldxa [%g3] ASI_DMMU, %g5 | |
0835ae0f DM |
100 | sethi %hi(sparc64_kern_sec_context), %g2 |
101 | ldx [%g2 + %lo(sparc64_kern_sec_context)], %g2 | |
1da177e4 LT |
102 | stxa %g2, [%g3] ASI_DMMU |
103 | membar #Sync | |
104 | add %g6, TI_FPREGS + 0xc0, %g2 | |
105 | faddd %f0, %f2, %f8 | |
106 | fmuld %f0, %f2, %f10 | |
ba639933 DM |
107 | membar #Sync |
108 | ldda [%g1] ASI_BLK_S, %f32 | |
1da177e4 | 109 | ldda [%g2] ASI_BLK_S, %f48 |
ba639933 | 110 | membar #Sync |
1da177e4 LT |
111 | faddd %f0, %f2, %f12 |
112 | fmuld %f0, %f2, %f14 | |
113 | faddd %f0, %f2, %f16 | |
114 | fmuld %f0, %f2, %f18 | |
115 | faddd %f0, %f2, %f20 | |
116 | fmuld %f0, %f2, %f22 | |
117 | faddd %f0, %f2, %f24 | |
118 | fmuld %f0, %f2, %f26 | |
119 | faddd %f0, %f2, %f28 | |
120 | fmuld %f0, %f2, %f30 | |
121 | b,pt %xcc, fpdis_exit | |
b445e26c | 122 | nop |
/* Lower half saved only: zero-fill %f36-%f62, block-load %f0-%f30. */
1da177e4 LT |
123 | 2: andcc %g5, FPRS_DU, %g0 |
124 | bne,pt %icc, 3f | |
125 | fzero %f32 | |
126 | mov SECONDARY_CONTEXT, %g3 | |
127 | fzero %f34 | |
128 | ldxa [%g3] ASI_DMMU, %g5 | |
129 | add %g6, TI_FPREGS, %g1 | |
0835ae0f DM |
130 | sethi %hi(sparc64_kern_sec_context), %g2 |
131 | ldx [%g2 + %lo(sparc64_kern_sec_context)], %g2 | |
1da177e4 LT |
132 | stxa %g2, [%g3] ASI_DMMU |
133 | membar #Sync | |
134 | add %g6, TI_FPREGS + 0x40, %g2 | |
135 | faddd %f32, %f34, %f36 | |
136 | fmuld %f32, %f34, %f38 | |
ba639933 DM |
137 | membar #Sync |
138 | ldda [%g1] ASI_BLK_S, %f0 | |
1da177e4 | 139 | ldda [%g2] ASI_BLK_S, %f16 |
ba639933 | 140 | membar #Sync |
1da177e4 LT |
141 | faddd %f32, %f34, %f40 |
142 | fmuld %f32, %f34, %f42 | |
143 | faddd %f32, %f34, %f44 | |
144 | fmuld %f32, %f34, %f46 | |
145 | faddd %f32, %f34, %f48 | |
146 | fmuld %f32, %f34, %f50 | |
147 | faddd %f32, %f34, %f52 | |
148 | fmuld %f32, %f34, %f54 | |
149 | faddd %f32, %f34, %f56 | |
150 | fmuld %f32, %f34, %f58 | |
151 | faddd %f32, %f34, %f60 | |
152 | fmuld %f32, %f34, %f62 | |
153 | ba,pt %xcc, fpdis_exit | |
b445e26c | 154 | nop |
/* Both halves saved: four block loads restore %f0-%f62. */
1da177e4 LT |
155 | 3: mov SECONDARY_CONTEXT, %g3 |
156 | add %g6, TI_FPREGS, %g1 | |
157 | ldxa [%g3] ASI_DMMU, %g5 | |
0835ae0f DM |
158 | sethi %hi(sparc64_kern_sec_context), %g2 |
159 | ldx [%g2 + %lo(sparc64_kern_sec_context)], %g2 | |
1da177e4 LT |
160 | stxa %g2, [%g3] ASI_DMMU |
161 | membar #Sync | |
162 | mov 0x40, %g2 | |
ba639933 DM |
163 | membar #Sync |
164 | ldda [%g1] ASI_BLK_S, %f0 | |
1da177e4 LT |
165 | ldda [%g1 + %g2] ASI_BLK_S, %f16 |
166 | add %g1, 0x80, %g1 | |
167 | ldda [%g1] ASI_BLK_S, %f32 | |
168 | ldda [%g1 + %g2] ASI_BLK_S, %f48 | |
169 | membar #Sync | |
/* fpdis_exit: restore the saved SECONDARY_CONTEXT (%g5), then common exit. */
170 | fpdis_exit: | |
171 | stxa %g5, [%g3] ASI_DMMU | |
172 | membar #Sync | |
173 | fpdis_exit2: | |
174 | wr %g7, 0, %gsr | |
175 | ldx [%g6 + TI_XFSR], %fsr | |
176 | rdpr %tstate, %g3 | |
177 | or %g3, %g4, %g3 ! anal... | |
178 | wrpr %g3, %tstate | |
179 | wr %g0, FPRS_FEF, %fprs ! clean DU/DL bits | |
180 | retry | |
181 | ||
182 | .align 32 | |
/* fp_other_bounce: hand an fp-exception-other trap to the C handler
 * do_fpother(regs) and leave via rtrap (%l6 cleared in the delay slot). */
183 | fp_other_bounce: | |
184 | call do_fpother | |
185 | add %sp, PTREGS_OFF, %o0 | |
186 | ba,pt %xcc, rtrap | |
187 | clr %l6 | |
188 | ||
/* do_fpother_check_fitos: fp-exception-other fast path for "unfinished
 * FP-op" fitos.  If the trap came from user mode, FSR.ftt == 2
 * (unfinished), the Inexact bit is clear, and the trapping instruction
 * (fetched from user space via ASI_AIUP) matches the fitos encoding
 * (FITOS_MASK/FITOS_COMPARE), emulate it as fitod into scratch %f62
 * followed by fdtos — dispatched through the two jump tables below,
 * indexed by the instruction's rs2 and rd fields respectively.  Any other
 * case bounces to do_fptrap_after_fsr with %g7 pre-set so the eventual
 * return path runs fp_other_bounce. */
189 | .globl do_fpother_check_fitos | |
190 | .align 32 | |
191 | do_fpother_check_fitos: | |
192 | sethi %hi(fp_other_bounce - 4), %g7 | |
193 | or %g7, %lo(fp_other_bounce - 4), %g7 | |
194 | ||
195 | /* NOTE: Need to preserve %g7 until we fully commit | |
196 | * to the fitos fixup. | |
197 | */ | |
198 | stx %fsr, [%g6 + TI_XFSR] | |
199 | rdpr %tstate, %g3 | |
200 | andcc %g3, TSTATE_PRIV, %g0 | |
201 | bne,pn %xcc, do_fptrap_after_fsr | |
202 | nop | |
203 | ldx [%g6 + TI_XFSR], %g3 | |
204 | srlx %g3, 14, %g1 | |
205 | and %g1, 7, %g1 | |
206 | cmp %g1, 2 ! Unfinished FP-OP | |
207 | bne,pn %xcc, do_fptrap_after_fsr | |
208 | sethi %hi(1 << 23), %g1 ! Inexact | |
209 | andcc %g3, %g1, %g0 | |
210 | bne,pn %xcc, do_fptrap_after_fsr | |
211 | rdpr %tpc, %g1 | |
212 | lduwa [%g1] ASI_AIUP, %g3 ! This cannot ever fail | |
213 | #define FITOS_MASK 0xc1f83fe0 | |
214 | #define FITOS_COMPARE 0x81a01880 | |
215 | sethi %hi(FITOS_MASK), %g1 | |
216 | or %g1, %lo(FITOS_MASK), %g1 | |
217 | and %g3, %g1, %g1 | |
218 | sethi %hi(FITOS_COMPARE), %g2 | |
219 | or %g2, %lo(FITOS_COMPARE), %g2 | |
220 | cmp %g1, %g2 | |
221 | bne,pn %xcc, do_fptrap_after_fsr | |
222 | nop | |
/* Committed to emulation: save the user's %f62 so it can be used as
 * scratch, then dispatch on the rs2 field (insn bits 4:0). */
223 | std %f62, [%g6 + TI_FPREGS + (62 * 4)] | |
224 | sethi %hi(fitos_table_1), %g1 | |
225 | and %g3, 0x1f, %g2 | |
226 | or %g1, %lo(fitos_table_1), %g1 | |
227 | sllx %g2, 2, %g2 | |
228 | jmpl %g1 + %g2, %g0 | |
229 | ba,pt %xcc, fitos_emul_continue | |
230 | ||
/* One fitod per possible rs2 register; the delay-slot branch above
 * returns control to fitos_emul_continue after the selected entry. */
231 | fitos_table_1: | |
232 | fitod %f0, %f62 | |
233 | fitod %f1, %f62 | |
234 | fitod %f2, %f62 | |
235 | fitod %f3, %f62 | |
236 | fitod %f4, %f62 | |
237 | fitod %f5, %f62 | |
238 | fitod %f6, %f62 | |
239 | fitod %f7, %f62 | |
240 | fitod %f8, %f62 | |
241 | fitod %f9, %f62 | |
242 | fitod %f10, %f62 | |
243 | fitod %f11, %f62 | |
244 | fitod %f12, %f62 | |
245 | fitod %f13, %f62 | |
246 | fitod %f14, %f62 | |
247 | fitod %f15, %f62 | |
248 | fitod %f16, %f62 | |
249 | fitod %f17, %f62 | |
250 | fitod %f18, %f62 | |
251 | fitod %f19, %f62 | |
252 | fitod %f20, %f62 | |
253 | fitod %f21, %f62 | |
254 | fitod %f22, %f62 | |
255 | fitod %f23, %f62 | |
256 | fitod %f24, %f62 | |
257 | fitod %f25, %f62 | |
258 | fitod %f26, %f62 | |
259 | fitod %f27, %f62 | |
260 | fitod %f28, %f62 | |
261 | fitod %f29, %f62 | |
262 | fitod %f30, %f62 | |
263 | fitod %f31, %f62 | |
264 | ||
/* Second dispatch on the rd field (insn bits 29:25): convert the double
 * in %f62 into the destination single register. */
265 | fitos_emul_continue: | |
266 | sethi %hi(fitos_table_2), %g1 | |
267 | srl %g3, 25, %g2 | |
268 | or %g1, %lo(fitos_table_2), %g1 | |
269 | and %g2, 0x1f, %g2 | |
270 | sllx %g2, 2, %g2 | |
271 | jmpl %g1 + %g2, %g0 | |
272 | ba,pt %xcc, fitos_emul_fini | |
273 | ||
274 | fitos_table_2: | |
275 | fdtos %f62, %f0 | |
276 | fdtos %f62, %f1 | |
277 | fdtos %f62, %f2 | |
278 | fdtos %f62, %f3 | |
279 | fdtos %f62, %f4 | |
280 | fdtos %f62, %f5 | |
281 | fdtos %f62, %f6 | |
282 | fdtos %f62, %f7 | |
283 | fdtos %f62, %f8 | |
284 | fdtos %f62, %f9 | |
285 | fdtos %f62, %f10 | |
286 | fdtos %f62, %f11 | |
287 | fdtos %f62, %f12 | |
288 | fdtos %f62, %f13 | |
289 | fdtos %f62, %f14 | |
290 | fdtos %f62, %f15 | |
291 | fdtos %f62, %f16 | |
292 | fdtos %f62, %f17 | |
293 | fdtos %f62, %f18 | |
294 | fdtos %f62, %f19 | |
295 | fdtos %f62, %f20 | |
296 | fdtos %f62, %f21 | |
297 | fdtos %f62, %f22 | |
298 | fdtos %f62, %f23 | |
299 | fdtos %f62, %f24 | |
300 | fdtos %f62, %f25 | |
301 | fdtos %f62, %f26 | |
302 | fdtos %f62, %f27 | |
303 | fdtos %f62, %f28 | |
304 | fdtos %f62, %f29 | |
305 | fdtos %f62, %f30 | |
306 | fdtos %f62, %f31 | |
307 | ||
/* Restore the user's %f62 scratch and retire the emulated instruction. */
308 | fitos_emul_fini: | |
309 | ldd [%g6 + TI_FPREGS + (62 * 4)], %f62 | |
310 | done | |
311 | ||
/* do_fptrap: save the live FP state into thread_info before handing the
 * trap to C code.  Stores %fsr (TI_XFSR), accumulates %fprs into
 * TI_FPSAVED, stores %gsr (TI_GSR), then block-stores the dirty register
 * halves (per FPRS_DL/FPRS_DU) to TI_FPREGS under the kernel secondary
 * context.  Finally disables the FPU (wr %g0, 0, %fprs) in etrap's delay
 * slot.  do_fptrap_after_fsr is entered by callers that have already
 * stored %fsr themselves (see do_fpother_check_fitos). */
312 | .globl do_fptrap | |
313 | .align 32 | |
314 | do_fptrap: | |
315 | stx %fsr, [%g6 + TI_XFSR] | |
316 | do_fptrap_after_fsr: | |
317 | ldub [%g6 + TI_FPSAVED], %g3 | |
318 | rd %fprs, %g1 | |
319 | or %g3, %g1, %g3 | |
320 | stb %g3, [%g6 + TI_FPSAVED] | |
321 | rd %gsr, %g3 | |
322 | stx %g3, [%g6 + TI_GSR] | |
/* Switch SECONDARY_CONTEXT to the kernel context for the block stores;
 * the original context is held in %g5 and restored at label 5. */
323 | mov SECONDARY_CONTEXT, %g3 | |
324 | ldxa [%g3] ASI_DMMU, %g5 | |
0835ae0f DM |
325 | sethi %hi(sparc64_kern_sec_context), %g2 |
326 | ldx [%g2 + %lo(sparc64_kern_sec_context)], %g2 | |
1da177e4 LT |
327 | stxa %g2, [%g3] ASI_DMMU |
328 | membar #Sync | |
329 | add %g6, TI_FPREGS, %g2 | |
330 | andcc %g1, FPRS_DL, %g0 | |
331 | be,pn %icc, 4f | |
332 | mov 0x40, %g3 | |
333 | stda %f0, [%g2] ASI_BLK_S | |
334 | stda %f16, [%g2 + %g3] ASI_BLK_S | |
335 | andcc %g1, FPRS_DU, %g0 | |
336 | be,pn %icc, 5f | |
337 | 4: add %g2, 128, %g2 | |
338 | stda %f32, [%g2] ASI_BLK_S | |
339 | stda %f48, [%g2 + %g3] ASI_BLK_S | |
340 | 5: mov SECONDARY_CONTEXT, %g1 | |
341 | membar #Sync | |
342 | stxa %g5, [%g1] ASI_DMMU | |
343 | membar #Sync | |
344 | ba,pt %xcc, etrap | |
345 | wr %g0, 0, %fprs | |
346 | ||
1da177e4 LT |
347 | /* The registers for cross calls will be: |
348 | * | |
349 | * DATA 0: [low 32-bits] Address of function to call, jmp to this | |
350 | * [high 32-bits] MMU Context Argument 0, place in %g5 | |
80dc0d6b | 351 | * DATA 1: Address Argument 1, place in %g1 |
1da177e4 LT |
352 | * DATA 2: Address Argument 2, place in %g7 |
353 | * | |
354 | * With this method we can do most of the cross-call tlb/cache | |
355 | * flushing very quickly. | |
356 | * | |
80dc0d6b | 357 | * Current CPU's IRQ worklist table is locked into %g6, don't touch. |
1da177e4 LT |
358 | */ |
359 | .text | |
360 | .align 32 | |
/* do_ivec: interrupt vector trap dispatch.  Reads mondo DATA 0 (offset
 * 0x40 of ASI_INTR_R).  Values >= KERNBASE are cross-call function
 * addresses -> do_ivec_xcall; smaller values are vector numbers -> link
 * the ivector_table bucket onto this CPU's per-PIL worklist (indexed off
 * %g6) and post the matching softint bit. */
361 | .globl do_ivec | |
362 | do_ivec: | |
363 | mov 0x40, %g3 | |
364 | ldxa [%g3 + %g0] ASI_INTR_R, %g3 | |
365 | sethi %hi(KERNBASE), %g4 | |
366 | cmp %g3, %g4 | |
367 | bgeu,pn %xcc, do_ivec_xcall | |
368 | srlx %g3, 32, %g5 | |
369 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
370 | membar #Sync | |
371 | ||
372 | sethi %hi(ivector_table), %g2 | |
373 | sllx %g3, 5, %g3 | |
374 | or %g2, %lo(ivector_table), %g2 | |
375 | add %g2, %g3, %g3 | |
1da177e4 | 376 | ldub [%g3 + 0x04], %g4 /* pil */ |
088dd1f8 | 377 | mov 1, %g2 |
1da177e4 LT |
378 | sllx %g2, %g4, %g2 |
379 | sllx %g4, 2, %g4 | |
088dd1f8 | 380 | |
1da177e4 LT |
381 | lduw [%g6 + %g4], %g5 /* g5 = irq_work(cpu, pil) */ |
382 | stw %g5, [%g3 + 0x00] /* bucket->irq_chain = g5 */ | |
383 | stw %g3, [%g6 + %g4] /* irq_work(cpu, pil) = bucket */ | |
384 | wr %g2, 0x0, %set_softint | |
385 | retry | |
/* Cross call: DATA 1 -> %g1, DATA 2 -> %g7 (per the header comment),
 * ACK the mondo, then jump to the handler address in %g3. */
386 | do_ivec_xcall: | |
387 | mov 0x50, %g1 | |
1da177e4 LT |
388 | ldxa [%g1 + %g0] ASI_INTR_R, %g1 |
389 | srl %g3, 0, %g3 | |
088dd1f8 | 390 | |
1da177e4 LT |
391 | mov 0x60, %g7 |
392 | ldxa [%g7 + %g0] ASI_INTR_R, %g7 | |
393 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
394 | membar #Sync | |
395 | ba,pt %xcc, 1f | |
396 | nop | |
397 | ||
398 | .align 32 | |
399 | 1: jmpl %g3, %g0 | |
400 | nop | |
401 | ||
/* save_alternate_globals(%o0 = save_area): store all three alternate
 * global register sets (AG, IG, MG) into the 0xc0-byte save area.  With
 * interrupts disabled (PSTATE_IE cleared in %o1), each wrpr selects a
 * register set via %pstate and its %g0-%g7 are stored; the caller's
 * original %pstate (%o5) is restored before returning. */
1da177e4 LT |
402 | .globl save_alternate_globals |
403 | save_alternate_globals: /* %o0 = save_area */ | |
404 | rdpr %pstate, %o5 | |
405 | andn %o5, PSTATE_IE, %o1 | |
406 | wrpr %o1, PSTATE_AG, %pstate | |
407 | stx %g0, [%o0 + 0x00] | |
408 | stx %g1, [%o0 + 0x08] | |
409 | stx %g2, [%o0 + 0x10] | |
410 | stx %g3, [%o0 + 0x18] | |
411 | stx %g4, [%o0 + 0x20] | |
412 | stx %g5, [%o0 + 0x28] | |
413 | stx %g6, [%o0 + 0x30] | |
414 | stx %g7, [%o0 + 0x38] | |
415 | wrpr %o1, PSTATE_IG, %pstate | |
416 | stx %g0, [%o0 + 0x40] | |
417 | stx %g1, [%o0 + 0x48] | |
418 | stx %g2, [%o0 + 0x50] | |
419 | stx %g3, [%o0 + 0x58] | |
420 | stx %g4, [%o0 + 0x60] | |
421 | stx %g5, [%o0 + 0x68] | |
422 | stx %g6, [%o0 + 0x70] | |
423 | stx %g7, [%o0 + 0x78] | |
424 | wrpr %o1, PSTATE_MG, %pstate | |
425 | stx %g0, [%o0 + 0x80] | |
426 | stx %g1, [%o0 + 0x88] | |
427 | stx %g2, [%o0 + 0x90] | |
428 | stx %g3, [%o0 + 0x98] | |
429 | stx %g4, [%o0 + 0xa0] | |
430 | stx %g5, [%o0 + 0xa8] | |
431 | stx %g6, [%o0 + 0xb0] | |
432 | stx %g7, [%o0 + 0xb8] | |
433 | wrpr %o5, 0x0, %pstate | |
434 | retl | |
435 | nop | |
436 | ||
/* restore_alternate_globals(%o0 = save_area): inverse of
 * save_alternate_globals — reload the AG, IG and MG register sets from
 * the same save-area layout, then restore the caller's %pstate. */
437 | .globl restore_alternate_globals | |
438 | restore_alternate_globals: /* %o0 = save_area */ | |
439 | rdpr %pstate, %o5 | |
440 | andn %o5, PSTATE_IE, %o1 | |
441 | wrpr %o1, PSTATE_AG, %pstate | |
442 | ldx [%o0 + 0x00], %g0 | |
443 | ldx [%o0 + 0x08], %g1 | |
444 | ldx [%o0 + 0x10], %g2 | |
445 | ldx [%o0 + 0x18], %g3 | |
446 | ldx [%o0 + 0x20], %g4 | |
447 | ldx [%o0 + 0x28], %g5 | |
448 | ldx [%o0 + 0x30], %g6 | |
449 | ldx [%o0 + 0x38], %g7 | |
450 | wrpr %o1, PSTATE_IG, %pstate | |
451 | ldx [%o0 + 0x40], %g0 | |
452 | ldx [%o0 + 0x48], %g1 | |
453 | ldx [%o0 + 0x50], %g2 | |
454 | ldx [%o0 + 0x58], %g3 | |
455 | ldx [%o0 + 0x60], %g4 | |
456 | ldx [%o0 + 0x68], %g5 | |
457 | ldx [%o0 + 0x70], %g6 | |
458 | ldx [%o0 + 0x78], %g7 | |
459 | wrpr %o1, PSTATE_MG, %pstate | |
460 | ldx [%o0 + 0x80], %g0 | |
461 | ldx [%o0 + 0x88], %g1 | |
462 | ldx [%o0 + 0x90], %g2 | |
463 | ldx [%o0 + 0x98], %g3 | |
464 | ldx [%o0 + 0xa0], %g4 | |
465 | ldx [%o0 + 0xa8], %g5 | |
466 | ldx [%o0 + 0xb0], %g6 | |
467 | ldx [%o0 + 0xb8], %g7 | |
468 | wrpr %o5, 0x0, %pstate | |
469 | retl | |
470 | nop | |
471 | ||
/* getcc(%o0 = pt_regs): extract the condition-code nibble from
 * pt_regs->tstate (bits 35:32) and store it into the pt_regs G1 slot.
 * setcc(%o0 = pt_regs): splice the value from the pt_regs G1 slot back
 * into the TSTATE_ICC field of pt_regs->tstate, preserving other bits. */
472 | .globl getcc, setcc | |
473 | getcc: | |
474 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
475 | srlx %o1, 32, %o1 | |
476 | and %o1, 0xf, %o1 | |
477 | retl | |
478 | stx %o1, [%o0 + PT_V9_G1] | |
479 | setcc: | |
480 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
481 | ldx [%o0 + PT_V9_G1], %o2 | |
482 | or %g0, %ulo(TSTATE_ICC), %o3 | |
483 | sllx %o3, 32, %o3 | |
484 | andn %o1, %o3, %o1 | |
485 | sllx %o2, 32, %o2 | |
486 | and %o2, %o3, %o2 | |
487 | or %o1, %o2, %o1 | |
488 | retl | |
489 | stx %o1, [%o0 + PT_V9_TSTATE] | |
490 | ||
/* utrap: dispatch a user trap handler whose address is in %g1.  If no
 * handler is registered (%g1 == 0) fall through to etrap.  Otherwise open
 * a fresh register window, rewrite TSTATE's CWP field to the current
 * window, and point %tnpc at the handler before retiring with `done`.
 * utrap_ill: illegal utrap -- bounce to the C bad_trap() handler and
 * return through rtrap. */
491 | .globl utrap, utrap_ill | |
492 | utrap: brz,pn %g1, etrap | |
493 | nop | |
494 | save %sp, -128, %sp | |
495 | rdpr %tstate, %l6 | |
496 | rdpr %cwp, %l7 | |
497 | andn %l6, TSTATE_CWP, %l6 | |
498 | wrpr %l6, %l7, %tstate | |
499 | rdpr %tpc, %l6 | |
500 | rdpr %tnpc, %l7 | |
501 | wrpr %g1, 0, %tnpc | |
502 | done | |
503 | utrap_ill: | |
504 | call bad_trap | |
505 | add %sp, PTREGS_OFF, %o0 | |
506 | ba,pt %xcc, rtrap | |
507 | clr %l6 | |
508 | ||
/* netbsd_syscall: unimplemented compatibility stub — returns immediately. */
1da177e4 LT |
509 | /* XXX Here is stuff we still need to write... -DaveM XXX */ |
510 | .globl netbsd_syscall | |
511 | netbsd_syscall: | |
512 | retl | |
513 | nop | |
514 | ||
/* __spitfire_access_error: latch the async fault state (AFSR -> %g4,
 * AFAR -> %g5, plus both UDB error registers and the trap type/TL,
 * encoded per asm-sparc64/sfafsr.h), ACK the sticky bits, then call the
 * C logger spitfire_access_error() — via etraptl1 when TL > 1, else via
 * etrap_irq at PIL 15.  Error reporting is disabled first so we cannot
 * recurse into RED state. */
6c52a96e DM |
515 | /* We need to carefully read the error status, ACK |
516 | * the errors, prevent recursive traps, and pass the | |
517 | * information on to C code for logging. | |
518 | * | |
519 | * We pass the AFAR in as-is, and we encode the status | |
520 | * information as described in asm-sparc64/sfafsr.h | |
521 | */ | |
522 | .globl __spitfire_access_error | |
523 | __spitfire_access_error: | |
524 | /* Disable ESTATE error reporting so that we do not | |
525 | * take recursive traps and RED state the processor. | |
526 | */ | |
527 | stxa %g0, [%g0] ASI_ESTATE_ERROR_EN | |
528 | membar #Sync | |
529 | ||
530 | mov UDBE_UE, %g1 | |
531 | ldxa [%g0] ASI_AFSR, %g4 ! Get AFSR | |
532 | ||
533 | /* __spitfire_cee_trap branches here with AFSR in %g4 and | |
534 | * UDBE_CE in %g1. It only clears ESTATE_ERR_CE in the | |
535 | * ESTATE Error Enable register. | |
536 | */ | |
537 | __spitfire_cee_trap_continue: | |
538 | ldxa [%g0] ASI_AFAR, %g5 ! Get AFAR | |
539 | ||
540 | rdpr %tt, %g3 | |
541 | and %g3, 0x1ff, %g3 ! Paranoia | |
542 | sllx %g3, SFSTAT_TRAP_TYPE_SHIFT, %g3 | |
543 | or %g4, %g3, %g4 | |
544 | rdpr %tl, %g3 | |
545 | cmp %g3, 1 | |
546 | mov 1, %g3 | |
547 | bleu %xcc, 1f | |
548 | sllx %g3, SFSTAT_TL_GT_ONE_SHIFT, %g3 | |
549 | ||
550 | or %g4, %g3, %g4 | |
551 | ||
552 | /* Read in the UDB error register state, clearing the | |
553 | * sticky error bits as-needed. We only clear them if | |
554 | * the UE bit is set. Likewise, __spitfire_cee_trap | |
555 | * below will only do so if the CE bit is set. | |
556 | * | |
557 | * NOTE: UltraSparc-I/II have high and low UDB error | |
558 | * registers, corresponding to the two UDB units | |
559 | * present on those chips. UltraSparc-IIi only | |
560 | * has a single UDB, called "SDB" in the manual. | |
561 | * For IIi the upper UDB register always reads | |
562 | * as zero so for our purposes things will just | |
563 | * work with the checks below. | |
564 | */ | |
565 | 1: ldxa [%g0] ASI_UDBH_ERROR_R, %g3 | |
566 | and %g3, 0x3ff, %g7 ! Paranoia | |
567 | sllx %g7, SFSTAT_UDBH_SHIFT, %g7 | |
568 | or %g4, %g7, %g4 | |
569 | andcc %g3, %g1, %g3 ! UDBE_UE or UDBE_CE | |
570 | be,pn %xcc, 1f | |
571 | nop | |
572 | stxa %g3, [%g0] ASI_UDB_ERROR_W | |
573 | membar #Sync | |
574 | ||
575 | 1: mov 0x18, %g3 | |
576 | ldxa [%g3] ASI_UDBL_ERROR_R, %g3 | |
577 | and %g3, 0x3ff, %g7 ! Paranoia | |
578 | sllx %g7, SFSTAT_UDBL_SHIFT, %g7 | |
579 | or %g4, %g7, %g4 | |
580 | andcc %g3, %g1, %g3 ! UDBE_UE or UDBE_CE | |
581 | be,pn %xcc, 1f | |
582 | nop | |
583 | mov 0x18, %g7 | |
584 | stxa %g3, [%g7] ASI_UDB_ERROR_W | |
585 | membar #Sync | |
586 | ||
587 | 1: /* Ok, now that we've latched the error state, | |
588 | * clear the sticky bits in the AFSR. | |
589 | */ | |
590 | stxa %g4, [%g0] ASI_AFSR | |
591 | membar #Sync | |
592 | ||
593 | rdpr %tl, %g2 | |
594 | cmp %g2, 1 | |
595 | rdpr %pil, %g2 | |
596 | bleu,pt %xcc, 1f | |
597 | wrpr %g0, 15, %pil | |
598 | ||
599 | ba,pt %xcc, etraptl1 | |
600 | rd %pc, %g7 | |
601 | ||
602 | ba,pt %xcc, 2f | |
603 | nop | |
604 | ||
605 | 1: ba,pt %xcc, etrap_irq | |
606 | rd %pc, %g7 | |
607 | ||
/* After etrap, %l4/%l5 hold the latched AFSR/AFAR; pass them to C. */
608 | 2: mov %l4, %o1 | |
609 | mov %l5, %o2 | |
610 | call spitfire_access_error | |
611 | add %sp, PTREGS_OFF, %o0 | |
612 | ba,pt %xcc, rtrap | |
613 | clr %l6 | |
614 | ||
615 | /* This is the trap handler entry point for ECC correctable | |
616 | * errors. They are corrected, but we listen for the trap | |
617 | * so that the event can be logged. | |
618 | * | |
619 | * Disrupting errors are either: | |
620 | * 1) single-bit ECC errors during UDB reads to system | |
621 | * memory | |
622 | * 2) data parity errors during write-back events | |
623 | * | |
624 | * As far as I can make out from the manual, the CEE trap | |
625 | * is only for correctable errors during memory read | |
626 | * accesses by the front-end of the processor. | |
627 | * | |
628 | * The code below is only for trap level 1 CEE events, | |
629 | * as it is the only situation where we can safely record | |
630 | * and log. For trap level >1 we just clear the CE bit | |
631 | * in the AFSR and return. | |
632 | * | |
633 | * This is just like __spitfire_access_error above, but it | |
634 | * specifically handles correctable errors. If an | |
635 | * uncorrectable error is indicated in the AFSR we | |
636 | * will branch directly above to __spitfire_access_error | |
637 | * to handle it instead. Uncorrectable therefore takes | |
638 | * priority over correctable, and the error logging | |
639 | * C code will notice this case by inspecting the | |
640 | * trap type. | |
641 | */ | |
642 | .globl __spitfire_cee_trap | |
643 | __spitfire_cee_trap: | |
644 | ldxa [%g0] ASI_AFSR, %g4 ! Get AFSR | |
645 | mov 1, %g3 | |
646 | sllx %g3, SFAFSR_UE_SHIFT, %g3 | |
647 | andcc %g4, %g3, %g0 ! Check for UE | |
648 | bne,pn %xcc, __spitfire_access_error | |
649 | nop | |
650 | ||
651 | /* Ok, in this case we only have a correctable error. | |
652 | * Indicate we only wish to capture that state in register | |
653 | * %g1, and we only disable CE error reporting unlike UE | |
654 | * handling which disables all errors. | |
655 | */ | |
656 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g3 | |
657 | andn %g3, ESTATE_ERR_CE, %g3 | |
658 | stxa %g3, [%g0] ASI_ESTATE_ERROR_EN | |
659 | membar #Sync | |
660 | ||
661 | /* Preserve AFSR in %g4, indicate UDB state to capture in %g1 */ | |
662 | ba,pt %xcc, __spitfire_cee_trap_continue | |
663 | mov UDBE_CE, %g1 | |
664 | ||
/* Spitfire data access exception handlers (TL1 and TL0 variants).
 * Both switch off the MG/AG alternate globals, capture the D-MMU
 * SFSR (%g4) and SFAR (%g5), clear the SFSR fault-valid bit, then call
 * the matching C handler with (regs, sfsr, sfar) and return via rtrap.
 * The TL1 variant additionally special-cases faults taken inside the
 * window spill/fill trap range (tt 0x80-0xff) by redirecting to
 * winfix_dax instead of logging. */
665 | .globl __spitfire_data_access_exception | |
666 | .globl __spitfire_data_access_exception_tl1 | |
667 | __spitfire_data_access_exception_tl1: | |
1da177e4 LT |
668 | rdpr %pstate, %g4 |
669 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
670 | mov TLB_SFSR, %g3 | |
671 | mov DMMU_SFAR, %g5 | |
672 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
673 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
674 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
675 | membar #Sync | |
bde4e4ee DM |
676 | rdpr %tt, %g3 |
677 | cmp %g3, 0x80 ! first win spill/fill trap | |
678 | blu,pn %xcc, 1f | |
679 | cmp %g3, 0xff ! last win spill/fill trap | |
680 | bgu,pn %xcc, 1f | |
681 | nop | |
1da177e4 LT |
682 | ba,pt %xcc, winfix_dax
683 | rdpr %tpc, %g3 | |
bde4e4ee DM |
684 | 1: sethi %hi(109f), %g7 |
685 | ba,pt %xcc, etraptl1 | |
686 | 109: or %g7, %lo(109b), %g7 | |
687 | mov %l4, %o1 | |
688 | mov %l5, %o2 | |
6c52a96e | 689 | call spitfire_data_access_exception_tl1 |
bde4e4ee DM |
690 | add %sp, PTREGS_OFF, %o0 |
691 | ba,pt %xcc, rtrap | |
692 | clr %l6 | |
693 | ||
6c52a96e | 694 | __spitfire_data_access_exception: |
1da177e4 LT |
695 | rdpr %pstate, %g4 |
696 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
697 | mov TLB_SFSR, %g3 | |
698 | mov DMMU_SFAR, %g5 | |
699 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
700 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
701 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
702 | membar #Sync | |
703 | sethi %hi(109f), %g7 | |
704 | ba,pt %xcc, etrap | |
705 | 109: or %g7, %lo(109b), %g7 | |
706 | mov %l4, %o1 | |
707 | mov %l5, %o2 | |
6c52a96e | 708 | call spitfire_data_access_exception |
1da177e4 LT |
709 | add %sp, PTREGS_OFF, %o0 |
710 | ba,pt %xcc, rtrap | |
711 | clr %l6 | |
712 | ||
/* Spitfire instruction access exception handlers (TL1 and TL0).
 * Same shape as the data-access handlers above, but for the I-MMU:
 * capture the I-MMU SFSR (the IMMU has no SFAR, so %tpc is passed as the
 * fault address), clear the fault-valid bit, then call the matching C
 * handler with (regs, sfsr, tpc) and return via rtrap. */
6c52a96e DM |
713 | .globl __spitfire_insn_access_exception |
714 | .globl __spitfire_insn_access_exception_tl1 | |
715 | __spitfire_insn_access_exception_tl1: | |
1da177e4 LT |
716 | rdpr %pstate, %g4 |
717 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
718 | mov TLB_SFSR, %g3 | |
5ea68e02 DM |
719 | ldxa [%g3] ASI_IMMU, %g4 ! Get SFSR |
720 | rdpr %tpc, %g5 ! IMMU has no SFAR, use TPC | |
1da177e4 LT |
721 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit |
722 | membar #Sync | |
723 | sethi %hi(109f), %g7 | |
724 | ba,pt %xcc, etraptl1 | |
725 | 109: or %g7, %lo(109b), %g7 | |
726 | mov %l4, %o1 | |
727 | mov %l5, %o2 | |
6c52a96e | 728 | call spitfire_insn_access_exception_tl1 |
1da177e4 LT |
729 | add %sp, PTREGS_OFF, %o0 |
730 | ba,pt %xcc, rtrap | |
731 | clr %l6 | |
732 | ||
6c52a96e | 733 | __spitfire_insn_access_exception: |
1da177e4 LT |
734 | rdpr %pstate, %g4 |
735 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
736 | mov TLB_SFSR, %g3 | |
5ea68e02 DM |
737 | ldxa [%g3] ASI_IMMU, %g4 ! Get SFSR |
738 | rdpr %tpc, %g5 ! IMMU has no SFAR, use TPC | |
1da177e4 LT |
739 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit |
740 | membar #Sync | |
741 | sethi %hi(109f), %g7 | |
742 | ba,pt %xcc, etrap | |
743 | 109: or %g7, %lo(109b), %g7 | |
744 | mov %l4, %o1 | |
745 | mov %l5, %o2 | |
6c52a96e | 746 | call spitfire_insn_access_exception |
1da177e4 LT |
747 | add %sp, PTREGS_OFF, %o0 |
748 | ba,pt %xcc, rtrap | |
749 | clr %l6 | |
750 | ||
/* Cheetah trap vectors, patched into the trap table at boot.  Each
 * vector is exactly 8 instructions (one trap-table slot): disable the
 * relevant caches in the DCU control register (D+I for fast-ECC and
 * deferred, I only for CEE), then jump to the real handler with
 * %g1 = 0 (came from TL0 slot) or %g1 = 1 (TL1 slot). */
1da177e4 LT |
751 | /* These get patched into the trap table at boot time |
752 | * once we know we have a cheetah processor. | |
753 | */ | |
754 | .globl cheetah_fecc_trap_vector, cheetah_fecc_trap_vector_tl1 | |
755 | cheetah_fecc_trap_vector: | |
756 | membar #Sync | |
757 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
758 | andn %g1, DCU_DC | DCU_IC, %g1 | |
759 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
760 | membar #Sync | |
761 | sethi %hi(cheetah_fast_ecc), %g2 | |
762 | jmpl %g2 + %lo(cheetah_fast_ecc), %g0 | |
763 | mov 0, %g1 | |
764 | cheetah_fecc_trap_vector_tl1: | |
765 | membar #Sync | |
766 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
767 | andn %g1, DCU_DC | DCU_IC, %g1 | |
768 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
769 | membar #Sync | |
770 | sethi %hi(cheetah_fast_ecc), %g2 | |
771 | jmpl %g2 + %lo(cheetah_fast_ecc), %g0 | |
772 | mov 1, %g1 | |
773 | .globl cheetah_cee_trap_vector, cheetah_cee_trap_vector_tl1 | |
774 | cheetah_cee_trap_vector: | |
775 | membar #Sync | |
776 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
777 | andn %g1, DCU_IC, %g1 | |
778 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
779 | membar #Sync | |
780 | sethi %hi(cheetah_cee), %g2 | |
781 | jmpl %g2 + %lo(cheetah_cee), %g0 | |
782 | mov 0, %g1 | |
783 | cheetah_cee_trap_vector_tl1: | |
784 | membar #Sync | |
785 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
786 | andn %g1, DCU_IC, %g1 | |
787 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
788 | membar #Sync | |
789 | sethi %hi(cheetah_cee), %g2 | |
790 | jmpl %g2 + %lo(cheetah_cee), %g0 | |
791 | mov 1, %g1 | |
792 | .globl cheetah_deferred_trap_vector, cheetah_deferred_trap_vector_tl1 | |
793 | cheetah_deferred_trap_vector: | |
794 | membar #Sync | |
795 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1; | |
796 | andn %g1, DCU_DC | DCU_IC, %g1; | |
797 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG; | |
798 | membar #Sync; | |
799 | sethi %hi(cheetah_deferred_trap), %g2 | |
800 | jmpl %g2 + %lo(cheetah_deferred_trap), %g0 | |
801 | mov 0, %g1 | |
802 | cheetah_deferred_trap_vector_tl1: | |
803 | membar #Sync; | |
804 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1; | |
805 | andn %g1, DCU_DC | DCU_IC, %g1; | |
806 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG; | |
807 | membar #Sync; | |
808 | sethi %hi(cheetah_deferred_trap), %g2 | |
809 | jmpl %g2 + %lo(cheetah_deferred_trap), %g0 | |
810 | mov 1, %g1 | |
811 | ||
812 | /* Cheetah+ specific traps. These are for the new I/D cache parity | |
813 | * error traps. The first argument to cheetah_plus_parity_handler | |
814 | * is encoded as follows: | |
815 | * | |
816 | * Bit0: 0=dcache,1=icache | |
817 | * Bit1: 0=recoverable,1=unrecoverable | |
818 | */ | |
/* TL0 vectors bounce to the do_cheetah_plus_*_parity handlers below,
 * which log through cheetah_plus_parity_error() at PIL 15 and return
 * via rtrap_irq.  TL1 vectors switch %pstate to the interrupt globals
 * and jump to do_dcpe_tl1 / do_icpe_tl1 (defined further down). */
819 | .globl cheetah_plus_dcpe_trap_vector, cheetah_plus_dcpe_trap_vector_tl1 | |
820 | cheetah_plus_dcpe_trap_vector: | |
821 | membar #Sync | |
822 | sethi %hi(do_cheetah_plus_data_parity), %g7 | |
823 | jmpl %g7 + %lo(do_cheetah_plus_data_parity), %g0 | |
824 | nop | |
825 | nop | |
826 | nop | |
827 | nop | |
828 | nop | |
829 | ||
830 | do_cheetah_plus_data_parity: | |
80dc0d6b DM |
831 | rdpr %pil, %g2 |
832 | wrpr %g0, 15, %pil | |
833 | ba,pt %xcc, etrap_irq | |
1da177e4 LT |
834 | rd %pc, %g7 |
835 | mov 0x0, %o0 | |
836 | call cheetah_plus_parity_error | |
837 | add %sp, PTREGS_OFF, %o1 | |
80dc0d6b | 838 | ba,a,pt %xcc, rtrap_irq |
1da177e4 LT |
839 | |
840 | cheetah_plus_dcpe_trap_vector_tl1: | |
841 | membar #Sync | |
842 | wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate | |
843 | sethi %hi(do_dcpe_tl1), %g3 | |
844 | jmpl %g3 + %lo(do_dcpe_tl1), %g0 | |
845 | nop | |
846 | nop | |
847 | nop | |
848 | nop | |
849 | ||
850 | .globl cheetah_plus_icpe_trap_vector, cheetah_plus_icpe_trap_vector_tl1 | |
851 | cheetah_plus_icpe_trap_vector: | |
852 | membar #Sync | |
853 | sethi %hi(do_cheetah_plus_insn_parity), %g7 | |
854 | jmpl %g7 + %lo(do_cheetah_plus_insn_parity), %g0 | |
855 | nop | |
856 | nop | |
857 | nop | |
858 | nop | |
859 | nop | |
860 | ||
861 | do_cheetah_plus_insn_parity: | |
80dc0d6b DM |
862 | rdpr %pil, %g2 |
863 | wrpr %g0, 15, %pil | |
864 | ba,pt %xcc, etrap_irq | |
1da177e4 LT |
865 | rd %pc, %g7 |
866 | mov 0x1, %o0 | |
867 | call cheetah_plus_parity_error | |
868 | add %sp, PTREGS_OFF, %o1 | |
80dc0d6b | 869 | ba,a,pt %xcc, rtrap_irq |
1da177e4 LT |
870 | |
871 | cheetah_plus_icpe_trap_vector_tl1: | |
872 | membar #Sync | |
873 | wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate | |
874 | sethi %hi(do_icpe_tl1), %g3 | |
875 | jmpl %g3 + %lo(do_icpe_tl1), %g0 | |
876 | nop | |
877 | nop | |
878 | nop | |
879 | nop | |
880 | ||
/* do_dcpe_tl1: D-cache parity error taken at TL >= 1 (running on
 * interrupt globals).  First scan every active trap level's TSTATE for
 * TSTATE_IG — if an outer level was already on interrupt globals we
 * cannot recover and go to do_dcpe_tl1_fatal (logged as unrecoverable,
 * arg 0x2).  Otherwise bump dcache_parity_tl1_occurred, rewrite every
 * D-cache utag and zero every data word to clear the bad parity, and
 * join dcpe_icpe_tl1_common to flush and re-enable the caches. */
881 | /* If we take one of these traps when tl >= 1, then we | |
882 | * jump to interrupt globals. If some trap level above us | |
883 | * was also using interrupt globals, we cannot recover. | |
884 | * We may use all interrupt global registers except %g6. | |
885 | */ | |
886 | .globl do_dcpe_tl1, do_icpe_tl1 | |
887 | do_dcpe_tl1: | |
888 | rdpr %tl, %g1 ! Save original trap level | |
889 | mov 1, %g2 ! Setup TSTATE checking loop | |
890 | sethi %hi(TSTATE_IG), %g3 ! TSTATE mask bit | |
891 | 1: wrpr %g2, %tl ! Set trap level to check | |
892 | rdpr %tstate, %g4 ! Read TSTATE for this level | |
893 | andcc %g4, %g3, %g0 ! Interrupt globals in use? | |
894 | bne,a,pn %xcc, do_dcpe_tl1_fatal ! Yep, irrecoverable | |
895 | wrpr %g1, %tl ! Restore original trap level | |
896 | add %g2, 1, %g2 ! Next trap level | |
897 | cmp %g2, %g1 ! Hit them all yet? | |
898 | ble,pt %icc, 1b ! Not yet | |
899 | nop | |
900 | wrpr %g1, %tl ! Restore original trap level | |
901 | do_dcpe_tl1_nonfatal: /* Ok we may use interrupt globals safely. */ | |
80dc0d6b DM |
902 | sethi %hi(dcache_parity_tl1_occurred), %g2 |
903 | lduw [%g2 + %lo(dcache_parity_tl1_occurred)], %g1 | |
904 | add %g1, 1, %g1 | |
905 | stw %g1, [%g2 + %lo(dcache_parity_tl1_occurred)] | |
1da177e4 LT |
906 | /* Reset D-cache parity */ |
907 | sethi %hi(1 << 16), %g1 ! D-cache size | |
908 | mov (1 << 5), %g2 ! D-cache line size | |
909 | sub %g1, %g2, %g1 ! Move down 1 cacheline | |
910 | 1: srl %g1, 14, %g3 ! Compute UTAG | |
911 | membar #Sync | |
912 | stxa %g3, [%g1] ASI_DCACHE_UTAG | |
913 | membar #Sync | |
914 | sub %g2, 8, %g3 ! 64-bit data word within line | |
915 | 2: membar #Sync | |
916 | stxa %g0, [%g1 + %g3] ASI_DCACHE_DATA | |
917 | membar #Sync | |
918 | subcc %g3, 8, %g3 ! Next 64-bit data word | |
919 | bge,pt %icc, 2b | |
920 | nop | |
921 | subcc %g1, %g2, %g1 ! Next cacheline | |
922 | bge,pt %icc, 1b | |
923 | nop | |
924 | ba,pt %xcc, dcpe_icpe_tl1_common | |
925 | nop | |
926 | ||
/* Unrecoverable: log through etraptl1 with arg 0x2 (dcache, fatal). */
927 | do_dcpe_tl1_fatal: | |
928 | sethi %hi(1f), %g7 | |
929 | ba,pt %xcc, etraptl1 | |
930 | 1: or %g7, %lo(1b), %g7 | |
931 | mov 0x2, %o0 | |
932 | call cheetah_plus_parity_error | |
933 | add %sp, PTREGS_OFF, %o1 | |
934 | ba,pt %xcc, rtrap | |
935 | clr %l6 | |
936 | ||
/* do_icpe_tl1: I-cache parity error taken at TL >= 1.  Same TSTATE_IG
 * scan as do_dcpe_tl1 (fatal if an outer trap level was already on the
 * interrupt globals, logged with arg 0x3).  On the nonfatal path bump
 * icache_parity_tl1_occurred, invalidate every I-cache tag, then join
 * dcpe_icpe_tl1_common to flush the D-cache and re-enable both caches. */
937 | do_icpe_tl1: | |
938 | rdpr %tl, %g1 ! Save original trap level | |
939 | mov 1, %g2 ! Setup TSTATE checking loop | |
940 | sethi %hi(TSTATE_IG), %g3 ! TSTATE mask bit | |
941 | 1: wrpr %g2, %tl ! Set trap level to check | |
942 | rdpr %tstate, %g4 ! Read TSTATE for this level | |
943 | andcc %g4, %g3, %g0 ! Interrupt globals in use? | |
944 | bne,a,pn %xcc, do_icpe_tl1_fatal ! Yep, irrecoverable | |
945 | wrpr %g1, %tl ! Restore original trap level | |
946 | add %g2, 1, %g2 ! Next trap level | |
947 | cmp %g2, %g1 ! Hit them all yet? | |
948 | ble,pt %icc, 1b ! Not yet | |
949 | nop | |
950 | wrpr %g1, %tl ! Restore original trap level | |
951 | do_icpe_tl1_nonfatal: /* Ok we may use interrupt globals safely. */ | |
80dc0d6b DM |
952 | sethi %hi(icache_parity_tl1_occurred), %g2 |
953 | lduw [%g2 + %lo(icache_parity_tl1_occurred)], %g1 | |
954 | add %g1, 1, %g1 | |
955 | stw %g1, [%g2 + %lo(icache_parity_tl1_occurred)] | |
1da177e4 LT |
956 | /* Flush I-cache */ |
957 | sethi %hi(1 << 15), %g1 ! I-cache size | |
958 | mov (1 << 5), %g2 ! I-cache line size | |
959 | sub %g1, %g2, %g1 | |
960 | 1: or %g1, (2 << 3), %g3 | |
961 | stxa %g0, [%g3] ASI_IC_TAG | |
962 | membar #Sync | |
963 | subcc %g1, %g2, %g1 | |
964 | bge,pt %icc, 1b | |
965 | nop | |
966 | ba,pt %xcc, dcpe_icpe_tl1_common | |
967 | nop | |
968 | ||
/* Unrecoverable: log through etraptl1 with arg 0x3 (icache, fatal). */
969 | do_icpe_tl1_fatal: | |
970 | sethi %hi(1f), %g7 | |
971 | ba,pt %xcc, etraptl1 | |
972 | 1: or %g7, %lo(1b), %g7 | |
973 | mov 0x3, %o0 | |
974 | call cheetah_plus_parity_error | |
975 | add %sp, PTREGS_OFF, %o1 | |
976 | ba,pt %xcc, rtrap | |
977 | clr %l6 | |
978 | ||
/* Shared tail of both parity-error recovery paths.  Entered with the
 * caches still disabled by the trap-table stub; after the flush,
 * setting DCU_DC|DCU_IC re-enables them and `retry` re-executes the
 * instruction that took the parity trap.
 */
 979 | dcpe_icpe_tl1_common: | |
 980 | /* Flush D-cache, re-enable D/I caches in DCU and finally | |
 981 | * retry the trapping instruction. | |
 982 | */ | |
 983 | sethi %hi(1 << 16), %g1 ! D-cache size | |
 984 | mov (1 << 5), %g2 ! D-cache line size | |
 985 | sub %g1, %g2, %g1 | |
 986 | 1: stxa %g0, [%g1] ASI_DCACHE_TAG | |
 987 | membar #Sync | |
 988 | subcc %g1, %g2, %g1 | |
 989 | bge,pt %icc, 1b | |
 990 | nop | |
 991 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
 992 | or %g1, (DCU_DC | DCU_IC), %g1 | |
 993 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
 994 | membar #Sync | |
 995 | retry | |
996 | ||
3c2cafaf DM |
 997 | /* Capture I/D/E-cache state into per-cpu error scoreboard. |
 998 | * | |
 999 | * %g1: (TL>=0) ? 1 : 0 | |
 1000 | * %g2: scratch | |
 1001 | * %g3: scratch | |
 1002 | * %g4: AFSR | |
 1003 | * %g5: AFAR | |
 1004 | * %g6: current thread ptr | |
 1005 | * %g7: scratch | |
 1006 | */ | |
 1007 | __cheetah_log_error: | |
 1008 | /* Put "TL1" software bit into AFSR. */ | |
 1009 | and %g1, 0x1, %g1 | |
 1010 | sllx %g1, 63, %g2 | |
 1011 | or %g4, %g2, %g4 | |
 1012 | ||
/* CPU id comes from Safari config on Cheetah, JBUS config on Jalapeno;
 * each cpu owns a 512-byte slot (<<9) with a 256-byte (<<8) sub-slot
 * per TL0/TL1, per the shifts below.
 */
 1013 | /* Get log entry pointer for this cpu at this trap level. */ | |
 1014 | BRANCH_IF_JALAPENO(g2,g3,50f) | |
 1015 | ldxa [%g0] ASI_SAFARI_CONFIG, %g2 | |
 1016 | srlx %g2, 17, %g2 | |
 1017 | ba,pt %xcc, 60f | |
 1018 | and %g2, 0x3ff, %g2 | |
 1019 | ||
 1020 | 50: ldxa [%g0] ASI_JBUS_CONFIG, %g2 | |
 1021 | srlx %g2, 17, %g2 | |
 1022 | and %g2, 0x1f, %g2 | |
 1023 | ||
 1024 | 60: sllx %g2, 9, %g2 | |
 1025 | sethi %hi(cheetah_error_log), %g3 | |
 1026 | ldx [%g3 + %lo(cheetah_error_log)], %g3 | |
 1027 | brz,pn %g3, 80f | |
 1028 | nop | |
 1029 | ||
 1030 | add %g3, %g2, %g3 | |
 1031 | sllx %g1, 8, %g1 | |
 1032 | add %g3, %g1, %g1 | |
 1033 | ||
/* Slot still holding -1 means it is free; otherwise a previous,
 * not-yet-consumed error is there and we skip logging (80f).
 */
 1034 | /* %g1 holds pointer to the top of the logging scoreboard */ | |
 1035 | ldx [%g1 + 0x0], %g7 | |
 1036 | cmp %g7, -1 | |
 1037 | bne,pn %xcc, 80f | |
 1038 | nop | |
 1039 | ||
 1040 | stx %g4, [%g1 + 0x0] | |
 1041 | stx %g5, [%g1 + 0x8] | |
 1042 | add %g1, 0x10, %g1 | |
 1043 | ||
/* Probe all 4 D-cache ways (16K apart, loop at 13:) for a tag match
 * against the AFAR-derived physical tag.
 */
 1044 | /* %g1 now points to D-cache logging area */ | |
 1045 | set 0x3ff8, %g2 /* DC_addr mask */ | |
 1046 | and %g5, %g2, %g2 /* DC_addr bits of AFAR */ | |
 1047 | srlx %g5, 12, %g3 | |
 1048 | or %g3, 1, %g3 /* PHYS tag + valid */ | |
 1049 | ||
 1050 | 10: ldxa [%g2] ASI_DCACHE_TAG, %g7 | |
 1051 | cmp %g3, %g7 /* TAG match? */ | |
 1052 | bne,pt %xcc, 13f | |
 1053 | nop | |
 1054 | ||
 1055 | /* Yep, what we want, capture state. */ | |
 1056 | stx %g2, [%g1 + 0x20] | |
 1057 | stx %g7, [%g1 + 0x28] | |
 1058 | ||
 1059 | /* A membar Sync is required before and after utag access. */ | |
 1060 | membar #Sync | |
 1061 | ldxa [%g2] ASI_DCACHE_UTAG, %g7 | |
 1062 | membar #Sync | |
 1063 | stx %g7, [%g1 + 0x30] | |
 1064 | ldxa [%g2] ASI_DCACHE_SNOOP_TAG, %g7 | |
 1065 | stx %g7, [%g1 + 0x38] | |
 1066 | clr %g3 | |
 1067 | ||
 1068 | 12: ldxa [%g2 + %g3] ASI_DCACHE_DATA, %g7 | |
 1069 | stx %g7, [%g1] | |
 1070 | add %g3, (1 << 5), %g3 | |
 1071 | cmp %g3, (4 << 5) | |
 1072 | bl,pt %xcc, 12b | |
 1073 | add %g1, 0x8, %g1 | |
 1074 | ||
 1075 | ba,pt %xcc, 20f | |
 1076 | add %g1, 0x20, %g1 | |
 1077 | ||
 1078 | 13: sethi %hi(1 << 14), %g7 | |
 1079 | add %g2, %g7, %g2 | |
 1080 | srlx %g2, 14, %g7 | |
 1081 | cmp %g7, 4 | |
 1082 | bl,pt %xcc, 10b | |
 1083 | nop | |
 1084 | ||
 1085 | add %g1, 0x40, %g1 | |
 1086 | ||
/* Same scheme for the I-cache: probe the ways (loop at 23:) for a
 * PTAG match, then dump tags, snoop tag and the 8 instruction words.
 */
 1087 | /* %g1 now points to I-cache logging area */ | |
 1088 | 20: set 0x1fe0, %g2 /* IC_addr mask */ | |
 1089 | and %g5, %g2, %g2 /* IC_addr bits of AFAR */ | |
 1090 | sllx %g2, 1, %g2 /* IC_addr[13:6]==VA[12:5] */ | |
 1091 | srlx %g5, (13 - 8), %g3 /* Make PTAG */ | |
 1092 | andn %g3, 0xff, %g3 /* Mask off undefined bits */ | |
 1093 | ||
 1094 | 21: ldxa [%g2] ASI_IC_TAG, %g7 | |
 1095 | andn %g7, 0xff, %g7 | |
 1096 | cmp %g3, %g7 | |
 1097 | bne,pt %xcc, 23f | |
 1098 | nop | |
 1099 | ||
 1100 | /* Yep, what we want, capture state. */ | |
 1101 | stx %g2, [%g1 + 0x40] | |
 1102 | stx %g7, [%g1 + 0x48] | |
 1103 | add %g2, (1 << 3), %g2 | |
 1104 | ldxa [%g2] ASI_IC_TAG, %g7 | |
 1105 | add %g2, (1 << 3), %g2 | |
 1106 | stx %g7, [%g1 + 0x50] | |
 1107 | ldxa [%g2] ASI_IC_TAG, %g7 | |
 1108 | add %g2, (1 << 3), %g2 | |
 1109 | stx %g7, [%g1 + 0x60] | |
 1110 | ldxa [%g2] ASI_IC_TAG, %g7 | |
 1111 | stx %g7, [%g1 + 0x68] | |
 1112 | sub %g2, (3 << 3), %g2 | |
 1113 | ldxa [%g2] ASI_IC_STAG, %g7 | |
 1114 | stx %g7, [%g1 + 0x58] | |
 1115 | clr %g3 | |
 1116 | srlx %g2, 2, %g2 | |
 1117 | ||
 1118 | 22: ldxa [%g2 + %g3] ASI_IC_INSTR, %g7 | |
 1119 | stx %g7, [%g1] | |
 1120 | add %g3, (1 << 3), %g3 | |
 1121 | cmp %g3, (8 << 3) | |
 1122 | bl,pt %xcc, 22b | |
 1123 | add %g1, 0x8, %g1 | |
 1124 | ||
 1125 | ba,pt %xcc, 30f | |
 1126 | add %g1, 0x30, %g1 | |
 1127 | ||
 1128 | 23: sethi %hi(1 << 14), %g7 | |
 1129 | add %g2, %g7, %g2 | |
 1130 | srlx %g2, 14, %g7 | |
 1131 | cmp %g7, 4 | |
 1132 | bl,pt %xcc, 21b | |
 1133 | nop | |
 1134 | ||
 1135 | add %g1, 0x70, %g1 | |
 1136 | ||
/* E-cache: record the 32-byte-aligned AFAR line, its tag, force a
 * re-read via ASI_EC_R, then dump the 4 data words.
 */
 1137 | /* %g1 now points to E-cache logging area */ | |
 1138 | 30: andn %g5, (32 - 1), %g2 | |
 1139 | stx %g2, [%g1 + 0x20] | |
 1140 | ldxa [%g2] ASI_EC_TAG_DATA, %g7 | |
 1141 | stx %g7, [%g1 + 0x28] | |
 1142 | ldxa [%g2] ASI_EC_R, %g0 | |
 1143 | clr %g3 | |
 1144 | ||
 1145 | 31: ldxa [%g3] ASI_EC_DATA, %g7 | |
 1146 | stx %g7, [%g1 + %g3] | |
 1147 | add %g3, 0x8, %g3 | |
 1148 | cmp %g3, 0x20 | |
 1149 | ||
 1150 | bl,pt %xcc, 31b | |
 1151 | nop | |
/* Dispatch back to the per-trap-type continuation by trap type:
 * 0x70 = fast ECC, 0x63 = correctable ECC, anything else = deferred.
 * NOTE(review): the final ba,pt is not annulled and has no nop, so its
 * delay slot executes the first instruction after this block (the ldxa
 * at cheetah_fast_ecc, clobbering %g2) -- each c_* path immediately
 * overwrites %g2, but confirm this is intentional.
 */
 1152 | 80: | |
 1153 | rdpr %tt, %g2 | |
 1154 | cmp %g2, 0x70 | |
 1155 | be c_fast_ecc | |
 1156 | cmp %g2, 0x63 | |
 1157 | be c_cee | |
 1158 | nop | |
 1159 | ba,pt %xcc, c_deferred | |
 1160 | ||
1da177e4 LT |
1161 | /* Cheetah FECC trap handling, we get here from tl{0,1}_fecc |
1162 | * in the trap table. That code has done a memory barrier | |
1163 | * and has disabled both the I-cache and D-cache in the DCU | |
1164 | * control register. The I-cache is disabled so that we may | |
1165 | * capture the corrupted cache line, and the D-cache is disabled | |
1166 | * because corrupt data may have been placed there and we don't | |
1167 | * want to reference it. | |
1168 | * | |
1169 | * %g1 is one if this trap occurred at %tl >= 1. | |
1170 | * | |
1171 | * Next, we turn off error reporting so that we don't recurse. | |
1172 | */ | |
/* Fast ECC error entry: mask both correctable and uncorrectable error
 * reporting (to avoid recursing), latch AFSR/AFAR into %g4/%g5,
 * write AFSR back to clear the latched bits, then log cache state.
 */
 1173 | .globl cheetah_fast_ecc | |
 1174 | cheetah_fast_ecc: | |
 1175 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 | |
 1176 | andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2 | |
 1177 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN | |
 1178 | membar #Sync | |
 1179 | ||
 1180 | /* Fetch and clear AFSR/AFAR */ | |
 1181 | ldxa [%g0] ASI_AFSR, %g4 | |
 1182 | ldxa [%g0] ASI_AFAR, %g5 | |
 1183 | stxa %g4, [%g0] ASI_AFSR | |
 1184 | membar #Sync | |
 1185 | ||
3c2cafaf DM |
 1186 | ba,pt %xcc, __cheetah_log_error |
 1187 | nop | |
1da177e4 | 1188 |
/* Continuation after logging: raise PIL to 15, build a trap frame,
 * and hand off to the C handler with AFSR/AFAR as arguments
 * (presumably etrap_irq copied %g4/%g5 into %l4/%l5 -- confirm in
 * etrap.S).
 */
3c2cafaf | 1189 | c_fast_ecc:
1da177e4 LT |
 1190 | rdpr %pil, %g2 | |
 1191 | wrpr %g0, 15, %pil | |
 1192 | ba,pt %xcc, etrap_irq | |
 1193 | rd %pc, %g7 | |
 1194 | mov %l4, %o1 | |
 1195 | mov %l5, %o2 | |
 1196 | call cheetah_fecc_handler | |
 1197 | add %sp, PTREGS_OFF, %o0 | |
 1198 | ba,a,pt %xcc, rtrap_irq | |
1199 | ||
/* Correctable ECC error entry: identical structure to
 * cheetah_fast_ecc but only CEEN is masked (uncorrectable reporting
 * stays on), and the C handler is cheetah_cee_handler.
 */
 1200 | /* Our caller has disabled I-cache and performed membar Sync. */ | |
 1201 | .globl cheetah_cee | |
 1202 | cheetah_cee: | |
 1203 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 | |
 1204 | andn %g2, ESTATE_ERROR_CEEN, %g2 | |
 1205 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN | |
 1206 | membar #Sync | |
 1207 | ||
 1208 | /* Fetch and clear AFSR/AFAR */ | |
 1209 | ldxa [%g0] ASI_AFSR, %g4 | |
 1210 | ldxa [%g0] ASI_AFAR, %g5 | |
 1211 | stxa %g4, [%g0] ASI_AFSR | |
 1212 | membar #Sync | |
 1213 | ||
3c2cafaf DM |
 1214 | ba,pt %xcc, __cheetah_log_error |
 1215 | nop | |
1da177e4 | 1216 |
/* Post-logging continuation at PIL 15; %l4/%l5 carry AFSR/AFAR
 * (presumably via etrap_irq -- confirm in etrap.S).
 */
3c2cafaf | 1217 | c_cee:
1da177e4 LT |
 1218 | rdpr %pil, %g2 | |
 1219 | wrpr %g0, 15, %pil | |
 1220 | ba,pt %xcc, etrap_irq | |
 1221 | rd %pc, %g7 | |
 1222 | mov %l4, %o1 | |
 1223 | mov %l5, %o2 | |
 1224 | call cheetah_cee_handler | |
 1225 | add %sp, PTREGS_OFF, %o0 | |
 1226 | ba,a,pt %xcc, rtrap_irq | |
1227 | ||
/* Deferred (asynchronous) error entry: same pattern as the fast-ECC
 * path -- mask NCEEN+CEEN, latch and clear AFSR/AFAR, log, then call
 * cheetah_deferred_handler at PIL 15.
 */
 1228 | /* Our caller has disabled I-cache+D-cache and performed membar Sync. */ | |
 1229 | .globl cheetah_deferred_trap | |
 1230 | cheetah_deferred_trap: | |
 1231 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 | |
 1232 | andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2 | |
 1233 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN | |
 1234 | membar #Sync | |
 1235 | ||
 1236 | /* Fetch and clear AFSR/AFAR */ | |
 1237 | ldxa [%g0] ASI_AFSR, %g4 | |
 1238 | ldxa [%g0] ASI_AFAR, %g5 | |
 1239 | stxa %g4, [%g0] ASI_AFSR | |
 1240 | membar #Sync | |
 1241 | ||
3c2cafaf DM |
 1242 | ba,pt %xcc, __cheetah_log_error |
 1243 | nop | |
1da177e4 | 1244 |
3c2cafaf | 1245 | c_deferred:
1da177e4 LT |
 1246 | rdpr %pil, %g2 | |
 1247 | wrpr %g0, 15, %pil | |
 1248 | ba,pt %xcc, etrap_irq | |
 1249 | rd %pc, %g7 | |
 1250 | mov %l4, %o1 | |
 1251 | mov %l5, %o2 | |
 1252 | call cheetah_deferred_handler | |
 1253 | add %sp, PTREGS_OFF, %o0 | |
 1254 | ba,a,pt %xcc, rtrap_irq | |
1255 | ||
/* Privileged-action trap: clear the DMMU SFSR FaultValid bit, build a
 * trap frame (etrap returns to the 109: continuation), then let the C
 * do_privact() deliver the signal.
 */
 1256 | .globl __do_privact | |
 1257 | __do_privact: | |
 1258 | mov TLB_SFSR, %g3 | |
 1259 | stxa %g0, [%g3] ASI_DMMU ! Clear FaultValid bit | |
 1260 | membar #Sync | |
 1261 | sethi %hi(109f), %g7 | |
 1262 | ba,pt %xcc, etrap | |
 1263 | 109: or %g7, %lo(109b), %g7 | |
 1264 | call do_privact | |
 1265 | add %sp, PTREGS_OFF, %o0 | |
 1266 | ba,pt %xcc, rtrap | |
 1267 | clr %l6 | |
1268 | ||
1269 | .globl do_mna | |
1270 | do_mna: | |
1271 | rdpr %tl, %g3 | |
1272 | cmp %g3, 1 | |
1273 | ||
1274 | /* Setup %g4/%g5 now as they are used in the | |
1275 | * winfixup code. | |
1276 | */ | |
1277 | mov TLB_SFSR, %g3 | |
1278 | mov DMMU_SFAR, %g4 | |
1279 | ldxa [%g4] ASI_DMMU, %g4 | |
1280 | ldxa [%g3] ASI_DMMU, %g5 | |
1281 | stxa %g0, [%g3] ASI_DMMU ! Clear FaultValid bit | |
1282 | membar #Sync | |
1283 | bgu,pn %icc, winfix_mna | |
1284 | rdpr %tpc, %g3 | |
1285 | ||
1286 | 1: sethi %hi(109f), %g7 | |
1287 | ba,pt %xcc, etrap | |
1288 | 109: or %g7, %lo(109b), %g7 | |
1289 | mov %l4, %o1 | |
1290 | mov %l5, %o2 | |
1291 | call mem_address_unaligned | |
1292 | add %sp, PTREGS_OFF, %o0 | |
1293 | ba,pt %xcc, rtrap | |
1294 | clr %l6 | |
1295 | ||
/* Unaligned LDDF (load double FP) trap: capture SFSR into %g5, clear
 * FaultValid, capture SFAR into %g4, then etrap and call the C fixup
 * handle_lddfmna(regs, sfar, sfsr).
 */
 1296 | .globl do_lddfmna | |
 1297 | do_lddfmna: | |
 1298 | sethi %hi(109f), %g7 | |
 1299 | mov TLB_SFSR, %g4 | |
 1300 | ldxa [%g4] ASI_DMMU, %g5 | |
 1301 | stxa %g0, [%g4] ASI_DMMU ! Clear FaultValid bit | |
 1302 | membar #Sync | |
 1303 | mov DMMU_SFAR, %g4 | |
 1304 | ldxa [%g4] ASI_DMMU, %g4 | |
 1305 | ba,pt %xcc, etrap | |
 1306 | 109: or %g7, %lo(109b), %g7 | |
 1307 | mov %l4, %o1 | |
 1308 | mov %l5, %o2 | |
 1309 | call handle_lddfmna | |
 1310 | add %sp, PTREGS_OFF, %o0 | |
 1311 | ba,pt %xcc, rtrap | |
 1312 | clr %l6 | |
1313 | ||
/* Unaligned STDF (store double FP) trap: mirror image of do_lddfmna,
 * dispatching to handle_stdfmna(regs, sfar, sfsr).
 */
 1314 | .globl do_stdfmna | |
 1315 | do_stdfmna: | |
 1316 | sethi %hi(109f), %g7 | |
 1317 | mov TLB_SFSR, %g4 | |
 1318 | ldxa [%g4] ASI_DMMU, %g5 | |
 1319 | stxa %g0, [%g4] ASI_DMMU ! Clear FaultValid bit | |
 1320 | membar #Sync | |
 1321 | mov DMMU_SFAR, %g4 | |
 1322 | ldxa [%g4] ASI_DMMU, %g4 | |
 1323 | ba,pt %xcc, etrap | |
 1324 | 109: or %g7, %lo(109b), %g7 | |
 1325 | mov %l4, %o1 | |
 1326 | mov %l5, %o2 | |
 1327 | call handle_stdfmna | |
 1328 | add %sp, PTREGS_OFF, %o0 | |
 1329 | ba,pt %xcc, rtrap | |
 1330 | clr %l6 | |
1331 | ||
/* Breakpoint trap: trap frame already built by the caller; just hand
 * pt_regs to the C sparc_breakpoint() and return through rtrap.
 */
 1332 | .globl breakpoint_trap | |
 1333 | breakpoint_trap: | |
 1334 | call sparc_breakpoint | |
 1335 | add %sp, PTREGS_OFF, %o0 | |
 1336 | ba,pt %xcc, rtrap | |
 1337 | nop | |
1338 | ||
 1339 | #if defined(CONFIG_SUNOS_EMUL) || defined(CONFIG_SOLARIS_EMUL) || \ | |
 1340 | defined(CONFIG_SOLARIS_EMUL_MODULE) | |
 1341 | /* SunOS uses syscall zero as the 'indirect syscall' it looks | |
 1342 | * like indir_syscall(scall_num, arg0, arg1, arg2...); etc. | |
 1343 | * This is complete brain damage. | |
 1344 | */ | |
/* Range-check the indirected syscall number, look up its handler in
 * sunos_sys_table (sunos_nosys if out of range), shift the remaining
 * args down one slot, and tail-jump: `call %l6` with the saved %o7
 * restored in the delay slot makes the handler return straight to the
 * original caller.
 */
 1345 | .globl sunos_indir | |
 1346 | sunos_indir: | |
 1347 | srl %o0, 0, %o0 | |
 1348 | mov %o7, %l4 | |
 1349 | cmp %o0, NR_SYSCALLS | |
 1350 | blu,a,pt %icc, 1f | |
 1351 | sll %o0, 0x2, %o0 | |
 1352 | sethi %hi(sunos_nosys), %l6 | |
 1353 | b,pt %xcc, 2f | |
 1354 | or %l6, %lo(sunos_nosys), %l6 | |
 1355 | 1: sethi %hi(sunos_sys_table), %l7 | |
 1356 | or %l7, %lo(sunos_sys_table), %l7 | |
 1357 | lduw [%l7 + %o0], %l6 | |
 1358 | 2: mov %o1, %o0 | |
 1359 | mov %o2, %o1 | |
 1360 | mov %o3, %o2 | |
 1361 | mov %o4, %o3 | |
 1362 | mov %o5, %o4 | |
 1363 | call %l6 | |
 1364 | mov %l4, %o7 | |
 1365 | ||
/* SunOS getpid() returns pid in %o0 and ppid in %o1: the delay slot of
 * the second call stores the getppid() result into the saved I1 slot.
 */
 1366 | .globl sunos_getpid | |
 1367 | sunos_getpid: | |
 1368 | call sys_getppid | |
 1369 | nop | |
 1370 | call sys_getpid | |
 1371 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] | |
 1372 | b,pt %xcc, ret_sys_call | |
 1373 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
 1374 | ||
 1375 | /* SunOS getuid() returns uid in %o0 and euid in %o1 */ | |
 1376 | .globl sunos_getuid | |
 1377 | sunos_getuid: | |
 1378 | call sys32_geteuid16 | |
 1379 | nop | |
 1380 | call sys32_getuid16 | |
 1381 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] | |
 1382 | b,pt %xcc, ret_sys_call | |
 1383 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
 1384 | ||
 1385 | /* SunOS getgid() returns gid in %o0 and egid in %o1 */ | |
 1386 | .globl sunos_getgid | |
 1387 | sunos_getgid: | |
 1388 | call sys32_getegid16 | |
 1389 | nop | |
 1390 | call sys32_getgid16 | |
 1391 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] | |
 1392 | b,pt %xcc, ret_sys_call | |
 1393 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
 1394 | #endif | |
1395 | ||
1396 | /* SunOS's execv() call only specifies the argv argument, the | |
1397 | * environment settings are the same as the calling processes. | |
1398 | */ | |
1399 | .globl sunos_execv | |
1400 | sys_execve: | |
1401 | sethi %hi(sparc_execve), %g1 | |
1402 | ba,pt %xcc, execve_merge | |
1403 | or %g1, %lo(sparc_execve), %g1 | |
1404 | #ifdef CONFIG_COMPAT | |
1405 | .globl sys_execve | |
1406 | sunos_execv: | |
1407 | stx %g0, [%sp + PTREGS_OFF + PT_V9_I2] | |
1408 | .globl sys32_execve | |
1409 | sys32_execve: | |
1410 | sethi %hi(sparc32_execve), %g1 | |
1411 | or %g1, %lo(sparc32_execve), %g1 | |
1412 | #endif | |
1413 | execve_merge: | |
1414 | flushw | |
1415 | jmpl %g1, %g0 | |
1416 | add %sp, PTREGS_OFF, %o0 | |
1417 | ||
/* One-branch syscall stubs: each tail-branches to the real C entry
 * with its extra argument set up in the delay slot (pt_regs pointer,
 * or the user frame pointer for the sigaltstack family).  Note
 * sys_memory_ordering passes pt_regs in %o1, not %o0, since %i0 (the
 * first user argument) is already in %o0.
 */
 1418 | .globl sys_pipe, sys_sigpause, sys_nis_syscall | |
1da177e4 LT |
 1419 | .globl sys_rt_sigreturn | |
 1420 | .globl sys_ptrace | |
 1421 | .globl sys_sigaltstack | |
 1422 | .align 32 | |
 1423 | sys_pipe: ba,pt %xcc, sparc_pipe | |
 1424 | add %sp, PTREGS_OFF, %o0 | |
 1425 | sys_nis_syscall:ba,pt %xcc, c_sys_nis_syscall | |
 1426 | add %sp, PTREGS_OFF, %o0 | |
 1427 | sys_memory_ordering: | |
 1428 | ba,pt %xcc, sparc_memory_ordering | |
 1429 | add %sp, PTREGS_OFF, %o1 | |
 1430 | sys_sigaltstack:ba,pt %xcc, do_sigaltstack | |
 1431 | add %i6, STACK_BIAS, %o2 | |
 1432 | #ifdef CONFIG_COMPAT | |
 1433 | .globl sys32_sigstack | |
 1434 | sys32_sigstack: ba,pt %xcc, do_sys32_sigstack | |
 1435 | mov %i6, %o2 | |
 1436 | .globl sys32_sigaltstack | |
 1437 | sys32_sigaltstack: | |
 1438 | ba,pt %xcc, do_sys32_sigaltstack | |
 1439 | mov %i6, %o2 | |
 1440 | #endif | |
 1441 | .align 32 | |
1da177e4 LT |
/* sigreturn/ptrace stubs.  The `add %o7, 1f-.-4, %o7` in each delay
 * slot rewrites the return address so the called C function returns to
 * the shared local label 1: below (skipping the stub's trailing nop),
 * where exit-side syscall tracing is handled.
 */
 1442 | #ifdef CONFIG_COMPAT | |
 1443 | .globl sys32_sigreturn | |
 1444 | sys32_sigreturn: | |
 1445 | add %sp, PTREGS_OFF, %o0 | |
 1446 | call do_sigreturn32 | |
 1447 | add %o7, 1f-.-4, %o7 | |
 1448 | nop | |
 1449 | #endif | |
 1450 | sys_rt_sigreturn: | |
 1451 | add %sp, PTREGS_OFF, %o0 | |
 1452 | call do_rt_sigreturn | |
 1453 | add %o7, 1f-.-4, %o7 | |
 1454 | nop | |
 1455 | #ifdef CONFIG_COMPAT | |
 1456 | .globl sys32_rt_sigreturn | |
 1457 | sys32_rt_sigreturn: | |
 1458 | add %sp, PTREGS_OFF, %o0 | |
 1459 | call do_rt_sigreturn32 | |
 1460 | add %o7, 1f-.-4, %o7 | |
 1461 | nop | |
 1462 | #endif | |
 1463 | sys_ptrace: add %sp, PTREGS_OFF, %o0 | |
 1464 | call do_ptrace | |
 1465 | add %o7, 1f-.-4, %o7 | |
 1466 | nop | |
 1467 | .align 32 | |
/* Common return: if any trace/seccomp/audit flag is set, report
 * syscall exit (second arg 1) before heading into rtrap.
 */
 1468 | 1: ldx [%curptr + TI_FLAGS], %l5 | |
f7ceba36 | 1469 | andcc %l5, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0 | |
1da177e4 LT |
 1470 | be,pt %icc, rtrap | |
 1471 | clr %l6 | |
8d8a6479 | 1472 | add %sp, PTREGS_OFF, %o0 | |
1da177e4 | 1473 | call syscall_trace | |
8d8a6479 | 1474 | mov 1, %o1 | |
1da177e4 LT |
 1475 | ||
 1476 | ba,pt %xcc, rtrap | |
 1477 | clr %l6 | |
1478 | ||
1479 | /* This is how fork() was meant to be done, 8 instruction entry. | |
1480 | * | |
1481 | * I questioned the following code briefly, let me clear things | |
1482 | * up so you must not reason on it like I did. | |
1483 | * | |
1484 | * Know the fork_kpsr etc. we use in the sparc32 port? We don't | |
1485 | * need it here because the only piece of window state we copy to | |
1486 | * the child is the CWP register. Even if the parent sleeps, | |
1487 | * we are safe because we stuck it into pt_regs of the parent | |
1488 | * so it will not change. | |
1489 | * | |
1490 | * XXX This raises the question, whether we can do the same on | |
1491 | * XXX sparc32 to get rid of fork_kpsr _and_ fork_kwim. The | |
1492 | * XXX answer is yes. We stick fork_kpsr in UREG_G0 and | |
1493 | * XXX fork_kwim in UREG_G1 (global registers are considered | |
1494 | * XXX volatile across a system call in the sparc ABI I think | |
1495 | * XXX if it isn't we can use regs->y instead, anyone who depends | |
1496 | * XXX upon the Y register being preserved across a fork deserves | |
1497 | * XXX to lose). | |
1498 | * | |
1499 | * In fact we should take advantage of that fact for other things | |
1500 | * during system calls... | |
1501 | */ | |
/* fork/vfork are expressed as clone: vfork presets flags
 * 0x4000|0x0100|SIGCHLD (presumably CLONE_VFORK|CLONE_VM -- confirm
 * against <linux/sched.h>), fork presets SIGCHLD with a zero child
 * stack.  sys_clone substitutes the parent's %fp when the child stack
 * argument is zero (movrz) and passes pt_regs to sparc_do_fork.
 */
 1502 | .globl sys_fork, sys_vfork, sys_clone, sparc_exit | |
 1503 | .globl ret_from_syscall | |
 1504 | .align 32 | |
 1505 | sys_vfork: /* Under Linux, vfork and fork are just special cases of clone. */ | |
 1506 | sethi %hi(0x4000 | 0x0100 | SIGCHLD), %o0 | |
 1507 | or %o0, %lo(0x4000 | 0x0100 | SIGCHLD), %o0 | |
 1508 | ba,pt %xcc, sys_clone | |
 1509 | sys_fork: clr %o1 | |
 1510 | mov SIGCHLD, %o0 | |
 1511 | sys_clone: flushw | |
 1512 | movrz %o1, %fp, %o1 | |
 1513 | mov 0, %o3 | |
 1514 | ba,pt %xcc, sparc_do_fork | |
 1515 | add %sp, PTREGS_OFF, %o2 | |
 1516 | ret_from_syscall: | |
db7d9a4e DM |
 1517 | /* Clear current_thread_info()->new_child, and | |
 1518 | * check performance counter stuff too. | |
1da177e4 | 1519 | */ | |
db7d9a4e DM |
 1520 | stb %g0, [%g6 + TI_NEW_CHILD] | |
 1521 | ldx [%g6 + TI_FLAGS], %l0 | |
1da177e4 LT |
 1522 | call schedule_tail | |
 1523 | mov %g7, %o0 | |
 1524 | andcc %l0, _TIF_PERFCTR, %g0 | |
 1525 | be,pt %icc, 1f | |
 1526 | nop | |
 1527 | ldx [%g6 + TI_PCR], %o7 | |
 1528 | wr %g0, %o7, %pcr | |
 1529 | ||
 1530 | /* Blackbird errata workaround. See commentary in | |
 1531 | * smp.c:smp_percpu_timer_interrupt() for more | |
 1532 | * information. | |
 1533 | */ | |
 1534 | ba,pt %xcc, 99f | |
 1535 | nop | |
 1536 | .align 64 | |
 1537 | 99: wr %g0, %g0, %pic | |
 1538 | rd %pic, %g0 | |
 1539 | ||
 1540 | 1: b,pt %xcc, ret_sys_call | |
 1541 | ldx [%sp + PTREGS_OFF + PT_V9_I0], %o0 | |
/* sparc_exit: with interrupts disabled, fold %otherwin into %cansave
 * and zero %otherwin/TI_WSAVED so no user windows remain to be
 * spilled, then branch to sys_exit.
 */
 1542 | sparc_exit: wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV), %pstate | |
 1543 | rdpr %otherwin, %g1 | |
 1544 | rdpr %cansave, %g3 | |
 1545 | add %g3, %g1, %g3 | |
 1546 | wrpr %g3, 0x0, %cansave | |
 1547 | wrpr %g0, 0x0, %otherwin | |
 1548 | wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV | PSTATE_IE), %pstate | |
 1549 | ba,pt %xcc, sys_exit | |
 1550 | stb %g0, [%g6 + TI_WSAVED] | |
1551 | ||
/* Out-of-range syscall number: point %l7 at sys_ni_syscall and rejoin
 * the native dispatch path at label 4 (argument setup + call).
 */
 1552 | linux_sparc_ni_syscall: | |
 1553 | sethi %hi(sys_ni_syscall), %l7 | |
 1554 | b,pt %xcc, 4f | |
 1555 | or %l7, %lo(sys_ni_syscall), %l7 | |
1556 | ||
/* Syscall-entry tracing, compat path: report entry (second arg 0),
 * then reload and zero-extend all six arguments from the saved %i
 * registers -- the tracer may have rewritten them -- before rejoining
 * the dispatch at label 2.
 */
 1557 | linux_syscall_trace32: | |
8d8a6479 | 1558 | add %sp, PTREGS_OFF, %o0 | |
1da177e4 | 1559 | call syscall_trace | |
8d8a6479 | 1560 | clr %o1 | |
1da177e4 | 1561 | srl %i0, 0, %o0 | |
8d8a6479 | 1562 | srl %i4, 0, %o4 | |
1da177e4 LT |
 1563 | srl %i1, 0, %o1 | |
 1564 | srl %i2, 0, %o2 | |
 1565 | b,pt %xcc, 2f | |
 1566 | srl %i3, 0, %o3 | |
1567 | ||
/* Syscall-entry tracing, native 64-bit path: same as the 32-bit
 * variant but arguments are copied without zero-extension.
 */
 1568 | linux_syscall_trace: | |
8d8a6479 | 1569 | add %sp, PTREGS_OFF, %o0 | |
1da177e4 | 1570 | call syscall_trace | |
8d8a6479 | 1571 | clr %o1 | |
1da177e4 LT |
 1572 | mov %i0, %o0 | |
 1573 | mov %i1, %o1 | |
 1574 | mov %i2, %o2 | |
 1575 | mov %i3, %o3 | |
 1576 | b,pt %xcc, 2f | |
 1577 | mov %i4, %o4 | |
1578 | ||
1579 | ||
/* Compat (32-bit) syscall dispatch.  On entry %g1 holds the syscall
 * number and %l7 the syscall table base (set by the trap-table entry
 * -- TODO confirm in the trap table).  Arguments are zero-extended
 * with srl; table entries are 32-bit (lduw).  The !-comments annotate
 * UltraSPARC issue slots (IEU0/IEU1/Load/CTI grouping).
 */
 1580 | /* Linux 32-bit and SunOS system calls enter here... */ | |
 1581 | .align 32 | |
 1582 | .globl linux_sparc_syscall32 | |
 1583 | linux_sparc_syscall32: | |
 1584 | /* Direct access to user regs, much faster. */ | |
 1585 | cmp %g1, NR_SYSCALLS ! IEU1 Group | |
 1586 | bgeu,pn %xcc, linux_sparc_ni_syscall ! CTI | |
 1587 | srl %i0, 0, %o0 ! IEU0 | |
 1588 | sll %g1, 2, %l4 ! IEU0 Group | |
1da177e4 LT |
 1589 | srl %i4, 0, %o4 ! IEU1 | |
 1590 | lduw [%l7 + %l4], %l7 ! Load | |
 1591 | srl %i1, 0, %o1 ! IEU0 Group | |
 1592 | ldx [%curptr + TI_FLAGS], %l0 ! Load | |
 1593 | ||
 1594 | srl %i5, 0, %o5 ! IEU1 | |
 1595 | srl %i2, 0, %o2 ! IEU0 Group | |
f7ceba36 | 1596 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0 | |
1da177e4 LT |
 1597 | bne,pn %icc, linux_syscall_trace32 ! CTI | |
 1598 | mov %i0, %l5 ! IEU1 | |
 1599 | call %l7 ! CTI Group brk forced | |
 1600 | srl %i3, 0, %o3 ! IEU0 | |
 1601 | ba,a,pt %xcc, 3f | |
1602 | ||
/* Native 64-bit syscall dispatch: same structure as the compat path
 * but arguments pass through unmodified.  Label 4: is the re-entry
 * point used by linux_sparc_ni_syscall; label 2: is the re-entry point
 * after entry tracing.
 */
 1603 | /* Linux native and SunOS system calls enter here... */ | |
 1604 | .align 32 | |
 1605 | .globl linux_sparc_syscall, ret_sys_call | |
 1606 | linux_sparc_syscall: | |
 1607 | /* Direct access to user regs, much faster. */ | |
 1608 | cmp %g1, NR_SYSCALLS ! IEU1 Group | |
 1609 | bgeu,pn %xcc, linux_sparc_ni_syscall ! CTI | |
 1610 | mov %i0, %o0 ! IEU0 | |
 1611 | sll %g1, 2, %l4 ! IEU0 Group | |
1da177e4 LT |
 1612 | mov %i1, %o1 ! IEU1 | |
 1613 | lduw [%l7 + %l4], %l7 ! Load | |
 1614 | 4: mov %i2, %o2 ! IEU0 Group | |
 1615 | ldx [%curptr + TI_FLAGS], %l0 ! Load | |
 1616 | ||
 1617 | mov %i3, %o3 ! IEU1 | |
 1618 | mov %i4, %o4 ! IEU0 Group | |
f7ceba36 | 1619 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0 | |
1da177e4 LT |
 1620 | bne,pn %icc, linux_syscall_trace ! CTI Group | |
 1621 | mov %i0, %l5 ! IEU0 | |
 1622 | 2: call %l7 ! CTI Group brk forced | |
 1623 | mov %i5, %o5 ! IEU0 | |
 1624 | nop | |
1625 | ||
/* Common syscall return.  Per the SPARC syscall ABI here, the carry
 * bit in saved TSTATE (both icc and xcc copies, built in %g2) signals
 * error to userspace: a return value in (-ERESTART_RESTARTBLOCK, 0]
 * treated as unsigned >= that bound means failure, in which case the
 * value is negated to abs(errno) and carry is set.  TI_SYS_NOERROR
 * (force_successful_syscall_return) forces the success path at 80:.
 * %l1/%l2 advance TPC/TNPC past the trap instruction.
 */
 1626 | 3: stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
 1627 | ret_sys_call: | |
1da177e4 LT |
 1628 | ldx [%sp + PTREGS_OFF + PT_V9_TSTATE], %g3 | |
 1629 | ldx [%sp + PTREGS_OFF + PT_V9_TNPC], %l1 ! pc = npc | |
 1630 | sra %o0, 0, %o0 | |
 1631 | mov %ulo(TSTATE_XCARRY | TSTATE_ICARRY), %g2 | |
 1632 | sllx %g2, 32, %g2 | |
 1633 | ||
 1634 | /* Check if force_successful_syscall_return() | |
 1635 | * was invoked. | |
 1636 | */ | |
695ca07b RM |
 1637 | ldub [%curptr + TI_SYS_NOERROR], %l2 |
 1638 | brnz,a,pn %l2, 80f | |
db7d9a4e | 1639 | stb %g0, [%curptr + TI_SYS_NOERROR] | |
1da177e4 | 1640 | | |
1da177e4 LT |
 1641 | cmp %o0, -ERESTART_RESTARTBLOCK | |
 1642 | bgeu,pn %xcc, 1f | |
f7ceba36 | 1643 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %l6 | |
1da177e4 LT |
 1644 | 80: | |
 1645 | /* System call success, clear Carry condition code. */ | |
 1646 | andn %g3, %g2, %g3 | |
 1647 | stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE] | |
 1648 | bne,pn %icc, linux_syscall_trace2 | |
 1649 | add %l1, 0x4, %l2 ! npc = npc+4 | |
 1650 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] | |
 1651 | ba,pt %xcc, rtrap_clr_l6 | |
 1652 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] | |
 1653 | ||
 1654 | 1: | |
 1655 | /* System call failure, set Carry condition code. | |
 1656 | * Also, get abs(errno) to return to the process. | |
 1657 | */ | |
f7ceba36 | 1658 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %l6 | |
1da177e4 LT |
 1659 | sub %g0, %o0, %o0 | |
 1660 | or %g3, %g2, %g3 | |
 1661 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
 1662 | mov 1, %l6 | |
 1663 | stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE] | |
 1664 | bne,pn %icc, linux_syscall_trace2 | |
 1665 | add %l1, 0x4, %l2 ! npc = npc+4 | |
 1666 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] | |
 1667 | ||
 1668 | b,pt %xcc, rtrap | |
 1669 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] | |
/* Exit-side tracing: report syscall exit (second arg 1), then finish
 * advancing TPC/TNPC and leave through rtrap.
 */
 1670 | linux_syscall_trace2: | |
8d8a6479 | 1671 | add %sp, PTREGS_OFF, %o0 | |
1da177e4 | 1672 | call syscall_trace | |
8d8a6479 | 1673 | mov 1, %o1 | |
1da177e4 LT |
 1674 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] | |
 1675 | ba,pt %xcc, rtrap | |
 1676 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] | |
1677 | ||
/* Flush all user register windows to the stack: repeatedly `save`
 * while %otherwin is nonzero (counting the saves in %g2), which forces
 * the spill traps, then `restore` the same number of times to return
 * to the original window.  Leaf routine, returns via retl.
 */
 1678 | .align 32 | |
 1679 | .globl __flushw_user | |
 1680 | __flushw_user: | |
 1681 | rdpr %otherwin, %g1 | |
 1682 | brz,pn %g1, 2f | |
 1683 | clr %g2 | |
 1684 | 1: save %sp, -128, %sp | |
 1685 | rdpr %otherwin, %g1 | |
 1686 | brnz,pt %g1, 1b | |
 1687 | add %g2, 1, %g2 | |
 1688 | 1: sub %g2, 1, %g2 | |
 1689 | brnz,pt %g2, 1b | |
 1690 | restore %g0, %g0, %g0 | |
 1691 | 2: retl | |
 1692 | nop | |