Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* $Id: entry.S,v 1.144 2002/02/09 19:49:30 davem Exp $ |
2 | * arch/sparc64/kernel/entry.S: Sparc64 trap low-level entry points. | |
3 | * | |
4 | * Copyright (C) 1995,1997 David S. Miller (davem@caip.rutgers.edu) | |
5 | * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) | |
6 | * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx) | |
7 | * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | |
8 | */ | |
9 | ||
10 | #include <linux/config.h> | |
11 | #include <linux/errno.h> | |
12 | ||
13 | #include <asm/head.h> | |
14 | #include <asm/asi.h> | |
15 | #include <asm/smp.h> | |
16 | #include <asm/ptrace.h> | |
17 | #include <asm/page.h> | |
18 | #include <asm/signal.h> | |
19 | #include <asm/pgtable.h> | |
20 | #include <asm/processor.h> | |
21 | #include <asm/visasm.h> | |
22 | #include <asm/estate.h> | |
23 | #include <asm/auxio.h> | |
24 | ||
25 | /* #define SYSCALL_TRACING 1 */ | |
26 | ||
27 | #define curptr g6 | |
28 | ||
29 | #define NR_SYSCALLS 284 /* Each OS is different... */ | |
30 | ||
31 | .text | |
32 | .align 32 | |
33 | ||
34 | .globl sparc64_vpte_patchme1 | |
35 | .globl sparc64_vpte_patchme2 | |
36 | /* | |
37 | * On a second level vpte miss, check whether the original fault is to the OBP | |
38 | * range (note that this is only possible for instruction miss, data misses to | |
39 | * obp range do not use vpte). If so, go back directly to the faulting address. | |
40 | * This is because we want to read the tpc, otherwise we have no way of knowing | |
41 | * the 8k aligned faulting address if we are using >8k kernel pagesize. This | |
42 | * also ensures no vpte range addresses are dropped into tlb while obp is | |
43 | * executing (see inherit_locked_prom_mappings() rant). | |
44 | */ | |
45 | sparc64_vpte_nucleus: | |
46 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
47 | mov 0xf, %g5 | |
48 | sllx %g5, 28, %g5 | |
49 | ||
50 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
51 | cmp %g4, %g5 | |
52 | blu,pn %xcc, sparc64_vpte_patchme1 | |
53 | mov 0x1, %g5 | |
54 | ||
55 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
56 | sllx %g5, 32, %g5 | |
57 | ||
58 | /* Is addr < HI_OBP_ADDRESS? */ | |
59 | cmp %g4, %g5 | |
60 | blu,pn %xcc, obp_iaddr_patch | |
61 | nop | |
62 | ||
63 | /* These two instructions are patched by paging_init(). */ | |
64 | sparc64_vpte_patchme1: | |
65 | sethi %hi(0), %g5 | |
66 | sparc64_vpte_patchme2: | |
67 | or %g5, %lo(0), %g5 | |
68 | ||
69 | /* With kernel PGD in %g5, branch back into dtlb_backend. */ | |
70 | ba,pt %xcc, sparc64_kpte_continue | |
71 | andn %g1, 0x3, %g1 /* Finish PMD offset adjustment. */ | |
72 | ||
73 | vpte_noent: | |
74 | /* Restore previous TAG_ACCESS, %g5 is zero, and we will | |
75 | * skip over the trap instruction so that the top level | |
76 | * TLB miss handler will think this %g5 value is just an | |
77 | * invalid PTE, thus branching to full fault processing. | |
78 | */ | |
79 | mov TLB_SFSR, %g1 | |
80 | stxa %g4, [%g1 + %g1] ASI_DMMU | |
81 | done | |
82 | ||
83 | .globl obp_iaddr_patch | |
84 | obp_iaddr_patch: | |
85 | /* These two instructions patched by inherit_prom_mappings(). */ | |
86 | sethi %hi(0), %g5 | |
87 | or %g5, %lo(0), %g5 | |
88 | ||
89 | /* Behave as if we are at TL0. */ | |
90 | wrpr %g0, 1, %tl | |
91 | rdpr %tpc, %g4 /* Find original faulting iaddr */ | |
92 | srlx %g4, 13, %g4 /* Throw out context bits */ | |
93 | sllx %g4, 13, %g4 /* g4 has vpn + ctx0 now */ | |
94 | ||
95 | /* Restore previous TAG_ACCESS. */ | |
96 | mov TLB_SFSR, %g1 | |
97 | stxa %g4, [%g1 + %g1] ASI_IMMU | |
98 | ||
99 | /* Get PMD offset. */ | |
100 | srlx %g4, 23, %g6 | |
101 | and %g6, 0x7ff, %g6 | |
102 | sllx %g6, 2, %g6 | |
103 | ||
104 | /* Load PMD, is it valid? */ | |
105 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
106 | brz,pn %g5, longpath | |
107 | sllx %g5, 11, %g5 | |
108 | ||
109 | /* Get PTE offset. */ | |
110 | srlx %g4, 13, %g6 | |
111 | and %g6, 0x3ff, %g6 | |
112 | sllx %g6, 3, %g6 | |
113 | ||
114 | /* Load PTE. */ | |
115 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
116 | brgez,pn %g5, longpath | |
117 | nop | |
118 | ||
119 | /* TLB load and return from trap. */ | |
120 | stxa %g5, [%g0] ASI_ITLB_DATA_IN | |
121 | retry | |
122 | ||
123 | .globl obp_daddr_patch | |
124 | obp_daddr_patch: | |
125 | /* These two instructions patched by inherit_prom_mappings(). */ | |
126 | sethi %hi(0), %g5 | |
127 | or %g5, %lo(0), %g5 | |
128 | ||
129 | /* Get PMD offset. */ | |
130 | srlx %g4, 23, %g6 | |
131 | and %g6, 0x7ff, %g6 | |
132 | sllx %g6, 2, %g6 | |
133 | ||
134 | /* Load PMD, is it valid? */ | |
135 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
136 | brz,pn %g5, longpath | |
137 | sllx %g5, 11, %g5 | |
138 | ||
139 | /* Get PTE offset. */ | |
140 | srlx %g4, 13, %g6 | |
141 | and %g6, 0x3ff, %g6 | |
142 | sllx %g6, 3, %g6 | |
143 | ||
144 | /* Load PTE. */ | |
145 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
146 | brgez,pn %g5, longpath | |
147 | nop | |
148 | ||
149 | /* TLB load and return from trap. */ | |
150 | stxa %g5, [%g0] ASI_DTLB_DATA_IN | |
151 | retry | |
152 | ||
153 | /* | |
154 | * On a first level data miss, check whether this is to the OBP range (note | |
155 | * that such accesses can be made by prom, as well as by kernel using | |
156 | * prom_getproperty on "address"), and if so, do not use vpte access ... | |
157 | * rather, use information saved during inherit_prom_mappings() using 8k | |
158 | * pagesize. | |
159 | */ | |
160 | kvmap: | |
161 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
162 | mov 0xf, %g5 | |
163 | sllx %g5, 28, %g5 | |
164 | ||
165 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
166 | cmp %g4, %g5 | |
167 | blu,pn %xcc, vmalloc_addr | |
168 | mov 0x1, %g5 | |
169 | ||
170 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
171 | sllx %g5, 32, %g5 | |
172 | ||
173 | /* Is addr < HI_OBP_ADDRESS? */ | |
174 | cmp %g4, %g5 | |
175 | blu,pn %xcc, obp_daddr_patch | |
176 | nop | |
177 | ||
178 | vmalloc_addr: | |
179 | /* If we get here, a vmalloc addr accessed, load kernel VPTE. */ | |
180 | ldxa [%g3 + %g6] ASI_N, %g5 | |
181 | brgez,pn %g5, longpath | |
182 | nop | |
183 | ||
184 | /* PTE is valid, load into TLB and return from trap. */ | |
185 | stxa %g5, [%g0] ASI_DTLB_DATA_IN ! Reload TLB | |
186 | retry | |
187 | ||
188 | /* This is trivial with the new code... */ | |
189 | .globl do_fpdis | |
190 | do_fpdis: | |
191 | sethi %hi(TSTATE_PEF), %g4 ! IEU0 | |
192 | rdpr %tstate, %g5 | |
193 | andcc %g5, %g4, %g0 | |
194 | be,pt %xcc, 1f | |
195 | nop | |
196 | rd %fprs, %g5 | |
197 | andcc %g5, FPRS_FEF, %g0 | |
198 | be,pt %xcc, 1f | |
199 | nop | |
200 | ||
201 | /* Legal state when DCR_IFPOE is set in Cheetah %dcr. */ | |
202 | sethi %hi(109f), %g7 | |
203 | ba,pt %xcc, etrap | |
204 | 109: or %g7, %lo(109b), %g7 | |
205 | add %g0, %g0, %g0 | |
206 | ba,a,pt %xcc, rtrap_clr_l6 | |
207 | ||
208 | 1: ldub [%g6 + TI_FPSAVED], %g5 ! Load Group | |
209 | wr %g0, FPRS_FEF, %fprs ! LSU Group+4bubbles | |
210 | andcc %g5, FPRS_FEF, %g0 ! IEU1 Group | |
211 | be,a,pt %icc, 1f ! CTI | |
212 | clr %g7 ! IEU0 | |
213 | ldx [%g6 + TI_GSR], %g7 ! Load Group | |
214 | 1: andcc %g5, FPRS_DL, %g0 ! IEU1 | |
215 | bne,pn %icc, 2f ! CTI | |
216 | fzero %f0 ! FPA | |
217 | andcc %g5, FPRS_DU, %g0 ! IEU1 Group | |
218 | bne,pn %icc, 1f ! CTI | |
219 | fzero %f2 ! FPA | |
220 | faddd %f0, %f2, %f4 | |
221 | fmuld %f0, %f2, %f6 | |
222 | faddd %f0, %f2, %f8 | |
223 | fmuld %f0, %f2, %f10 | |
224 | faddd %f0, %f2, %f12 | |
225 | fmuld %f0, %f2, %f14 | |
226 | faddd %f0, %f2, %f16 | |
227 | fmuld %f0, %f2, %f18 | |
228 | faddd %f0, %f2, %f20 | |
229 | fmuld %f0, %f2, %f22 | |
230 | faddd %f0, %f2, %f24 | |
231 | fmuld %f0, %f2, %f26 | |
232 | faddd %f0, %f2, %f28 | |
233 | fmuld %f0, %f2, %f30 | |
234 | faddd %f0, %f2, %f32 | |
235 | fmuld %f0, %f2, %f34 | |
236 | faddd %f0, %f2, %f36 | |
237 | fmuld %f0, %f2, %f38 | |
238 | faddd %f0, %f2, %f40 | |
239 | fmuld %f0, %f2, %f42 | |
240 | faddd %f0, %f2, %f44 | |
241 | fmuld %f0, %f2, %f46 | |
242 | faddd %f0, %f2, %f48 | |
243 | fmuld %f0, %f2, %f50 | |
244 | faddd %f0, %f2, %f52 | |
245 | fmuld %f0, %f2, %f54 | |
246 | faddd %f0, %f2, %f56 | |
247 | fmuld %f0, %f2, %f58 | |
248 | b,pt %xcc, fpdis_exit2 | |
249 | faddd %f0, %f2, %f60 | |
250 | 1: mov SECONDARY_CONTEXT, %g3 | |
251 | add %g6, TI_FPREGS + 0x80, %g1 | |
252 | faddd %f0, %f2, %f4 | |
253 | fmuld %f0, %f2, %f6 | |
254 | ldxa [%g3] ASI_DMMU, %g5 | |
255 | cplus_fptrap_insn_1: | |
256 | sethi %hi(0), %g2 | |
257 | stxa %g2, [%g3] ASI_DMMU | |
258 | membar #Sync | |
259 | add %g6, TI_FPREGS + 0xc0, %g2 | |
260 | faddd %f0, %f2, %f8 | |
261 | fmuld %f0, %f2, %f10 | |
262 | ldda [%g1] ASI_BLK_S, %f32 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
263 | ldda [%g2] ASI_BLK_S, %f48 | |
264 | faddd %f0, %f2, %f12 | |
265 | fmuld %f0, %f2, %f14 | |
266 | faddd %f0, %f2, %f16 | |
267 | fmuld %f0, %f2, %f18 | |
268 | faddd %f0, %f2, %f20 | |
269 | fmuld %f0, %f2, %f22 | |
270 | faddd %f0, %f2, %f24 | |
271 | fmuld %f0, %f2, %f26 | |
272 | faddd %f0, %f2, %f28 | |
273 | fmuld %f0, %f2, %f30 | |
b445e26c | 274 | membar #Sync |
1da177e4 | 275 | b,pt %xcc, fpdis_exit |
b445e26c | 276 | nop |
1da177e4 LT |
277 | 2: andcc %g5, FPRS_DU, %g0 |
278 | bne,pt %icc, 3f | |
279 | fzero %f32 | |
280 | mov SECONDARY_CONTEXT, %g3 | |
281 | fzero %f34 | |
282 | ldxa [%g3] ASI_DMMU, %g5 | |
283 | add %g6, TI_FPREGS, %g1 | |
284 | cplus_fptrap_insn_2: | |
285 | sethi %hi(0), %g2 | |
286 | stxa %g2, [%g3] ASI_DMMU | |
287 | membar #Sync | |
288 | add %g6, TI_FPREGS + 0x40, %g2 | |
289 | faddd %f32, %f34, %f36 | |
290 | fmuld %f32, %f34, %f38 | |
291 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
292 | ldda [%g2] ASI_BLK_S, %f16 | |
293 | faddd %f32, %f34, %f40 | |
294 | fmuld %f32, %f34, %f42 | |
295 | faddd %f32, %f34, %f44 | |
296 | fmuld %f32, %f34, %f46 | |
297 | faddd %f32, %f34, %f48 | |
298 | fmuld %f32, %f34, %f50 | |
299 | faddd %f32, %f34, %f52 | |
300 | fmuld %f32, %f34, %f54 | |
301 | faddd %f32, %f34, %f56 | |
302 | fmuld %f32, %f34, %f58 | |
303 | faddd %f32, %f34, %f60 | |
304 | fmuld %f32, %f34, %f62 | |
b445e26c | 305 | membar #Sync |
1da177e4 | 306 | ba,pt %xcc, fpdis_exit |
b445e26c | 307 | nop |
1da177e4 LT |
308 | 3: mov SECONDARY_CONTEXT, %g3 |
309 | add %g6, TI_FPREGS, %g1 | |
310 | ldxa [%g3] ASI_DMMU, %g5 | |
311 | cplus_fptrap_insn_3: | |
312 | sethi %hi(0), %g2 | |
313 | stxa %g2, [%g3] ASI_DMMU | |
314 | membar #Sync | |
315 | mov 0x40, %g2 | |
316 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
317 | ldda [%g1 + %g2] ASI_BLK_S, %f16 | |
318 | add %g1, 0x80, %g1 | |
319 | ldda [%g1] ASI_BLK_S, %f32 | |
320 | ldda [%g1 + %g2] ASI_BLK_S, %f48 | |
321 | membar #Sync | |
322 | fpdis_exit: | |
323 | stxa %g5, [%g3] ASI_DMMU | |
324 | membar #Sync | |
325 | fpdis_exit2: | |
326 | wr %g7, 0, %gsr | |
327 | ldx [%g6 + TI_XFSR], %fsr | |
328 | rdpr %tstate, %g3 | |
329 | or %g3, %g4, %g3 ! anal... | |
330 | wrpr %g3, %tstate | |
331 | wr %g0, FPRS_FEF, %fprs ! clean DU/DL bits | |
332 | retry | |
333 | ||
334 | .align 32 | |
335 | fp_other_bounce: | |
336 | call do_fpother | |
337 | add %sp, PTREGS_OFF, %o0 | |
338 | ba,pt %xcc, rtrap | |
339 | clr %l6 | |
340 | ||
341 | .globl do_fpother_check_fitos | |
342 | .align 32 | |
343 | do_fpother_check_fitos: | |
344 | sethi %hi(fp_other_bounce - 4), %g7 | |
345 | or %g7, %lo(fp_other_bounce - 4), %g7 | |
346 | ||
347 | /* NOTE: Need to preserve %g7 until we fully commit | |
348 | * to the fitos fixup. | |
349 | */ | |
350 | stx %fsr, [%g6 + TI_XFSR] | |
351 | rdpr %tstate, %g3 | |
352 | andcc %g3, TSTATE_PRIV, %g0 | |
353 | bne,pn %xcc, do_fptrap_after_fsr | |
354 | nop | |
355 | ldx [%g6 + TI_XFSR], %g3 | |
356 | srlx %g3, 14, %g1 | |
357 | and %g1, 7, %g1 | |
358 | cmp %g1, 2 ! Unfinished FP-OP | |
359 | bne,pn %xcc, do_fptrap_after_fsr | |
360 | sethi %hi(1 << 23), %g1 ! Inexact | |
361 | andcc %g3, %g1, %g0 | |
362 | bne,pn %xcc, do_fptrap_after_fsr | |
363 | rdpr %tpc, %g1 | |
364 | lduwa [%g1] ASI_AIUP, %g3 ! This cannot ever fail | |
365 | #define FITOS_MASK 0xc1f83fe0 | |
366 | #define FITOS_COMPARE 0x81a01880 | |
367 | sethi %hi(FITOS_MASK), %g1 | |
368 | or %g1, %lo(FITOS_MASK), %g1 | |
369 | and %g3, %g1, %g1 | |
370 | sethi %hi(FITOS_COMPARE), %g2 | |
371 | or %g2, %lo(FITOS_COMPARE), %g2 | |
372 | cmp %g1, %g2 | |
373 | bne,pn %xcc, do_fptrap_after_fsr | |
374 | nop | |
375 | std %f62, [%g6 + TI_FPREGS + (62 * 4)] | |
376 | sethi %hi(fitos_table_1), %g1 | |
377 | and %g3, 0x1f, %g2 | |
378 | or %g1, %lo(fitos_table_1), %g1 | |
379 | sllx %g2, 2, %g2 | |
380 | jmpl %g1 + %g2, %g0 | |
381 | ba,pt %xcc, fitos_emul_continue | |
382 | ||
383 | fitos_table_1: | |
384 | fitod %f0, %f62 | |
385 | fitod %f1, %f62 | |
386 | fitod %f2, %f62 | |
387 | fitod %f3, %f62 | |
388 | fitod %f4, %f62 | |
389 | fitod %f5, %f62 | |
390 | fitod %f6, %f62 | |
391 | fitod %f7, %f62 | |
392 | fitod %f8, %f62 | |
393 | fitod %f9, %f62 | |
394 | fitod %f10, %f62 | |
395 | fitod %f11, %f62 | |
396 | fitod %f12, %f62 | |
397 | fitod %f13, %f62 | |
398 | fitod %f14, %f62 | |
399 | fitod %f15, %f62 | |
400 | fitod %f16, %f62 | |
401 | fitod %f17, %f62 | |
402 | fitod %f18, %f62 | |
403 | fitod %f19, %f62 | |
404 | fitod %f20, %f62 | |
405 | fitod %f21, %f62 | |
406 | fitod %f22, %f62 | |
407 | fitod %f23, %f62 | |
408 | fitod %f24, %f62 | |
409 | fitod %f25, %f62 | |
410 | fitod %f26, %f62 | |
411 | fitod %f27, %f62 | |
412 | fitod %f28, %f62 | |
413 | fitod %f29, %f62 | |
414 | fitod %f30, %f62 | |
415 | fitod %f31, %f62 | |
416 | ||
417 | fitos_emul_continue: | |
418 | sethi %hi(fitos_table_2), %g1 | |
419 | srl %g3, 25, %g2 | |
420 | or %g1, %lo(fitos_table_2), %g1 | |
421 | and %g2, 0x1f, %g2 | |
422 | sllx %g2, 2, %g2 | |
423 | jmpl %g1 + %g2, %g0 | |
424 | ba,pt %xcc, fitos_emul_fini | |
425 | ||
426 | fitos_table_2: | |
427 | fdtos %f62, %f0 | |
428 | fdtos %f62, %f1 | |
429 | fdtos %f62, %f2 | |
430 | fdtos %f62, %f3 | |
431 | fdtos %f62, %f4 | |
432 | fdtos %f62, %f5 | |
433 | fdtos %f62, %f6 | |
434 | fdtos %f62, %f7 | |
435 | fdtos %f62, %f8 | |
436 | fdtos %f62, %f9 | |
437 | fdtos %f62, %f10 | |
438 | fdtos %f62, %f11 | |
439 | fdtos %f62, %f12 | |
440 | fdtos %f62, %f13 | |
441 | fdtos %f62, %f14 | |
442 | fdtos %f62, %f15 | |
443 | fdtos %f62, %f16 | |
444 | fdtos %f62, %f17 | |
445 | fdtos %f62, %f18 | |
446 | fdtos %f62, %f19 | |
447 | fdtos %f62, %f20 | |
448 | fdtos %f62, %f21 | |
449 | fdtos %f62, %f22 | |
450 | fdtos %f62, %f23 | |
451 | fdtos %f62, %f24 | |
452 | fdtos %f62, %f25 | |
453 | fdtos %f62, %f26 | |
454 | fdtos %f62, %f27 | |
455 | fdtos %f62, %f28 | |
456 | fdtos %f62, %f29 | |
457 | fdtos %f62, %f30 | |
458 | fdtos %f62, %f31 | |
459 | ||
460 | fitos_emul_fini: | |
461 | ldd [%g6 + TI_FPREGS + (62 * 4)], %f62 | |
462 | done | |
463 | ||
464 | .globl do_fptrap | |
465 | .align 32 | |
466 | do_fptrap: | |
467 | stx %fsr, [%g6 + TI_XFSR] | |
468 | do_fptrap_after_fsr: | |
469 | ldub [%g6 + TI_FPSAVED], %g3 | |
470 | rd %fprs, %g1 | |
471 | or %g3, %g1, %g3 | |
472 | stb %g3, [%g6 + TI_FPSAVED] | |
473 | rd %gsr, %g3 | |
474 | stx %g3, [%g6 + TI_GSR] | |
475 | mov SECONDARY_CONTEXT, %g3 | |
476 | ldxa [%g3] ASI_DMMU, %g5 | |
477 | cplus_fptrap_insn_4: | |
478 | sethi %hi(0), %g2 | |
479 | stxa %g2, [%g3] ASI_DMMU | |
480 | membar #Sync | |
481 | add %g6, TI_FPREGS, %g2 | |
482 | andcc %g1, FPRS_DL, %g0 | |
483 | be,pn %icc, 4f | |
484 | mov 0x40, %g3 | |
485 | stda %f0, [%g2] ASI_BLK_S | |
486 | stda %f16, [%g2 + %g3] ASI_BLK_S | |
487 | andcc %g1, FPRS_DU, %g0 | |
488 | be,pn %icc, 5f | |
489 | 4: add %g2, 128, %g2 | |
490 | stda %f32, [%g2] ASI_BLK_S | |
491 | stda %f48, [%g2 + %g3] ASI_BLK_S | |
492 | 5: mov SECONDARY_CONTEXT, %g1 | |
493 | membar #Sync | |
494 | stxa %g5, [%g1] ASI_DMMU | |
495 | membar #Sync | |
496 | ba,pt %xcc, etrap | |
497 | wr %g0, 0, %fprs | |
498 | ||
499 | cplus_fptrap_1: | |
500 | sethi %hi(CTX_CHEETAH_PLUS_CTX0), %g2 | |
501 | ||
502 | .globl cheetah_plus_patch_fpdis | |
503 | cheetah_plus_patch_fpdis: | |
504 | /* We configure the dTLB512_0 for 4MB pages and the | |
505 | * dTLB512_1 for 8K pages when in context zero. | |
506 | */ | |
507 | sethi %hi(cplus_fptrap_1), %o0 | |
508 | lduw [%o0 + %lo(cplus_fptrap_1)], %o1 | |
509 | ||
510 | set cplus_fptrap_insn_1, %o2 | |
511 | stw %o1, [%o2] | |
512 | flush %o2 | |
513 | set cplus_fptrap_insn_2, %o2 | |
514 | stw %o1, [%o2] | |
515 | flush %o2 | |
516 | set cplus_fptrap_insn_3, %o2 | |
517 | stw %o1, [%o2] | |
518 | flush %o2 | |
519 | set cplus_fptrap_insn_4, %o2 | |
520 | stw %o1, [%o2] | |
521 | flush %o2 | |
522 | ||
523 | retl | |
524 | nop | |
525 | ||
526 | /* The registers for cross calls will be: | |
527 | * | |
528 | * DATA 0: [low 32-bits] Address of function to call, jmp to this | |
529 | * [high 32-bits] MMU Context Argument 0, place in %g5 | |
530 | * DATA 1: Address Argument 1, place in %g6 | |
531 | * DATA 2: Address Argument 2, place in %g7 | |
532 | * | |
533 | * With this method we can do most of the cross-call tlb/cache | |
534 | * flushing very quickly. | |
535 | * | |
536 | * Current CPU's IRQ worklist table is locked into %g1, | |
537 | * don't touch. | |
538 | */ | |
539 | .text | |
540 | .align 32 | |
541 | .globl do_ivec | |
542 | do_ivec: | |
543 | mov 0x40, %g3 | |
544 | ldxa [%g3 + %g0] ASI_INTR_R, %g3 | |
545 | sethi %hi(KERNBASE), %g4 | |
546 | cmp %g3, %g4 | |
547 | bgeu,pn %xcc, do_ivec_xcall | |
548 | srlx %g3, 32, %g5 | |
549 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
550 | membar #Sync | |
551 | ||
552 | sethi %hi(ivector_table), %g2 | |
553 | sllx %g3, 5, %g3 | |
554 | or %g2, %lo(ivector_table), %g2 | |
555 | add %g2, %g3, %g3 | |
1da177e4 | 556 | ldub [%g3 + 0x04], %g4 /* pil */ |
088dd1f8 | 557 | mov 1, %g2 |
1da177e4 LT |
558 | sllx %g2, %g4, %g2 |
559 | sllx %g4, 2, %g4 | |
088dd1f8 | 560 | |
1da177e4 LT |
561 | lduw [%g6 + %g4], %g5 /* g5 = irq_work(cpu, pil) */ |
562 | stw %g5, [%g3 + 0x00] /* bucket->irq_chain = g5 */ | |
563 | stw %g3, [%g6 + %g4] /* irq_work(cpu, pil) = bucket */ | |
564 | wr %g2, 0x0, %set_softint | |
565 | retry | |
566 | do_ivec_xcall: | |
567 | mov 0x50, %g1 | |
1da177e4 LT |
568 | ldxa [%g1 + %g0] ASI_INTR_R, %g1 |
569 | srl %g3, 0, %g3 | |
088dd1f8 | 570 | |
1da177e4 LT |
571 | mov 0x60, %g7 |
572 | ldxa [%g7 + %g0] ASI_INTR_R, %g7 | |
573 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
574 | membar #Sync | |
575 | ba,pt %xcc, 1f | |
576 | nop | |
577 | ||
578 | .align 32 | |
579 | 1: jmpl %g3, %g0 | |
580 | nop | |
581 | ||
1da177e4 LT |
582 | .globl save_alternate_globals |
583 | save_alternate_globals: /* %o0 = save_area */ | |
584 | rdpr %pstate, %o5 | |
585 | andn %o5, PSTATE_IE, %o1 | |
586 | wrpr %o1, PSTATE_AG, %pstate | |
587 | stx %g0, [%o0 + 0x00] | |
588 | stx %g1, [%o0 + 0x08] | |
589 | stx %g2, [%o0 + 0x10] | |
590 | stx %g3, [%o0 + 0x18] | |
591 | stx %g4, [%o0 + 0x20] | |
592 | stx %g5, [%o0 + 0x28] | |
593 | stx %g6, [%o0 + 0x30] | |
594 | stx %g7, [%o0 + 0x38] | |
595 | wrpr %o1, PSTATE_IG, %pstate | |
596 | stx %g0, [%o0 + 0x40] | |
597 | stx %g1, [%o0 + 0x48] | |
598 | stx %g2, [%o0 + 0x50] | |
599 | stx %g3, [%o0 + 0x58] | |
600 | stx %g4, [%o0 + 0x60] | |
601 | stx %g5, [%o0 + 0x68] | |
602 | stx %g6, [%o0 + 0x70] | |
603 | stx %g7, [%o0 + 0x78] | |
604 | wrpr %o1, PSTATE_MG, %pstate | |
605 | stx %g0, [%o0 + 0x80] | |
606 | stx %g1, [%o0 + 0x88] | |
607 | stx %g2, [%o0 + 0x90] | |
608 | stx %g3, [%o0 + 0x98] | |
609 | stx %g4, [%o0 + 0xa0] | |
610 | stx %g5, [%o0 + 0xa8] | |
611 | stx %g6, [%o0 + 0xb0] | |
612 | stx %g7, [%o0 + 0xb8] | |
613 | wrpr %o5, 0x0, %pstate | |
614 | retl | |
615 | nop | |
616 | ||
617 | .globl restore_alternate_globals | |
618 | restore_alternate_globals: /* %o0 = save_area */ | |
619 | rdpr %pstate, %o5 | |
620 | andn %o5, PSTATE_IE, %o1 | |
621 | wrpr %o1, PSTATE_AG, %pstate | |
622 | ldx [%o0 + 0x00], %g0 | |
623 | ldx [%o0 + 0x08], %g1 | |
624 | ldx [%o0 + 0x10], %g2 | |
625 | ldx [%o0 + 0x18], %g3 | |
626 | ldx [%o0 + 0x20], %g4 | |
627 | ldx [%o0 + 0x28], %g5 | |
628 | ldx [%o0 + 0x30], %g6 | |
629 | ldx [%o0 + 0x38], %g7 | |
630 | wrpr %o1, PSTATE_IG, %pstate | |
631 | ldx [%o0 + 0x40], %g0 | |
632 | ldx [%o0 + 0x48], %g1 | |
633 | ldx [%o0 + 0x50], %g2 | |
634 | ldx [%o0 + 0x58], %g3 | |
635 | ldx [%o0 + 0x60], %g4 | |
636 | ldx [%o0 + 0x68], %g5 | |
637 | ldx [%o0 + 0x70], %g6 | |
638 | ldx [%o0 + 0x78], %g7 | |
639 | wrpr %o1, PSTATE_MG, %pstate | |
640 | ldx [%o0 + 0x80], %g0 | |
641 | ldx [%o0 + 0x88], %g1 | |
642 | ldx [%o0 + 0x90], %g2 | |
643 | ldx [%o0 + 0x98], %g3 | |
644 | ldx [%o0 + 0xa0], %g4 | |
645 | ldx [%o0 + 0xa8], %g5 | |
646 | ldx [%o0 + 0xb0], %g6 | |
647 | ldx [%o0 + 0xb8], %g7 | |
648 | wrpr %o5, 0x0, %pstate | |
649 | retl | |
650 | nop | |
651 | ||
652 | .globl getcc, setcc | |
653 | getcc: | |
654 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
655 | srlx %o1, 32, %o1 | |
656 | and %o1, 0xf, %o1 | |
657 | retl | |
658 | stx %o1, [%o0 + PT_V9_G1] | |
659 | setcc: | |
660 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
661 | ldx [%o0 + PT_V9_G1], %o2 | |
662 | or %g0, %ulo(TSTATE_ICC), %o3 | |
663 | sllx %o3, 32, %o3 | |
664 | andn %o1, %o3, %o1 | |
665 | sllx %o2, 32, %o2 | |
666 | and %o2, %o3, %o2 | |
667 | or %o1, %o2, %o1 | |
668 | retl | |
669 | stx %o1, [%o0 + PT_V9_TSTATE] | |
670 | ||
671 | .globl utrap, utrap_ill | |
672 | utrap: brz,pn %g1, etrap | |
673 | nop | |
674 | save %sp, -128, %sp | |
675 | rdpr %tstate, %l6 | |
676 | rdpr %cwp, %l7 | |
677 | andn %l6, TSTATE_CWP, %l6 | |
678 | wrpr %l6, %l7, %tstate | |
679 | rdpr %tpc, %l6 | |
680 | rdpr %tnpc, %l7 | |
681 | wrpr %g1, 0, %tnpc | |
682 | done | |
683 | utrap_ill: | |
684 | call bad_trap | |
685 | add %sp, PTREGS_OFF, %o0 | |
686 | ba,pt %xcc, rtrap | |
687 | clr %l6 | |
688 | ||
1da177e4 LT |
689 | /* XXX Here is stuff we still need to write... -DaveM XXX */ |
690 | .globl netbsd_syscall | |
691 | netbsd_syscall: | |
692 | retl | |
693 | nop | |
694 | ||
695 | /* These next few routines must be sure to clear the | |
696 | * SFSR FaultValid bit so that the fast tlb data protection | |
697 | * handler does not flush the wrong context and lock up the | |
698 | * box. | |
699 | */ | |
700 | .globl __do_data_access_exception | |
701 | .globl __do_data_access_exception_tl1 | |
702 | __do_data_access_exception_tl1: | |
703 | rdpr %pstate, %g4 | |
704 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
705 | mov TLB_SFSR, %g3 | |
706 | mov DMMU_SFAR, %g5 | |
707 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
708 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
709 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
710 | membar #Sync | |
711 | ba,pt %xcc, winfix_dax | |
712 | rdpr %tpc, %g3 | |
713 | __do_data_access_exception: | |
714 | rdpr %pstate, %g4 | |
715 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
716 | mov TLB_SFSR, %g3 | |
717 | mov DMMU_SFAR, %g5 | |
718 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
719 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
720 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
721 | membar #Sync | |
722 | sethi %hi(109f), %g7 | |
723 | ba,pt %xcc, etrap | |
724 | 109: or %g7, %lo(109b), %g7 | |
725 | mov %l4, %o1 | |
726 | mov %l5, %o2 | |
727 | call data_access_exception | |
728 | add %sp, PTREGS_OFF, %o0 | |
729 | ba,pt %xcc, rtrap | |
730 | clr %l6 | |
731 | ||
732 | .globl __do_instruction_access_exception | |
733 | .globl __do_instruction_access_exception_tl1 | |
734 | __do_instruction_access_exception_tl1: | |
735 | rdpr %pstate, %g4 | |
736 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
737 | mov TLB_SFSR, %g3 | |
738 | mov DMMU_SFAR, %g5 | |
739 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
740 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
741 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit | |
742 | membar #Sync | |
743 | sethi %hi(109f), %g7 | |
744 | ba,pt %xcc, etraptl1 | |
745 | 109: or %g7, %lo(109b), %g7 | |
746 | mov %l4, %o1 | |
747 | mov %l5, %o2 | |
748 | call instruction_access_exception_tl1 | |
749 | add %sp, PTREGS_OFF, %o0 | |
750 | ba,pt %xcc, rtrap | |
751 | clr %l6 | |
752 | ||
753 | __do_instruction_access_exception: | |
754 | rdpr %pstate, %g4 | |
755 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
756 | mov TLB_SFSR, %g3 | |
757 | mov DMMU_SFAR, %g5 | |
758 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
759 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
760 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit | |
761 | membar #Sync | |
762 | sethi %hi(109f), %g7 | |
763 | ba,pt %xcc, etrap | |
764 | 109: or %g7, %lo(109b), %g7 | |
765 | mov %l4, %o1 | |
766 | mov %l5, %o2 | |
767 | call instruction_access_exception | |
768 | add %sp, PTREGS_OFF, %o0 | |
769 | ba,pt %xcc, rtrap | |
770 | clr %l6 | |
771 | ||
772 | /* This is the trap handler entry point for ECC correctable | |
773 | * errors. They are corrected, but we listen for the trap | |
774 | * so that the event can be logged. | |
775 | * | |
776 | * Disrupting errors are either: | |
777 | * 1) single-bit ECC errors during UDB reads to system | |
778 | * memory | |
779 | * 2) data parity errors during write-back events | |
780 | * | |
781 | * As far as I can make out from the manual, the CEE trap | |
782 | * is only for correctable errors during memory read | |
783 | * accesses by the front-end of the processor. | |
784 | * | |
785 | * The code below is only for trap level 1 CEE events, | |
786 | * as it is the only situation where we can safely record | |
787 | * and log. For trap level >1 we just clear the CE bit | |
788 | * in the AFSR and return. | |
789 | */ | |
790 | ||
791 | /* Our trap handling infrastructure allows us to preserve | |
792 | * two 64-bit values during etrap for arguments to | |
793 | * subsequent C code. Therefore we encode the information | |
794 | * as follows: | |
795 | * | |
796 | * value 1) Full 64-bits of AFAR | |
797 | * value 2) Low 33-bits of AFSR, then bits 33-->42 | |
798 | * are UDBL error status and bits 43-->52 | |
799 | * are UDBH error status | |
800 | */ | |
801 | .align 64 | |
802 | .globl cee_trap | |
803 | cee_trap: | |
804 | ldxa [%g0] ASI_AFSR, %g1 ! Read AFSR | |
805 | ldxa [%g0] ASI_AFAR, %g2 ! Read AFAR | |
806 | sllx %g1, 31, %g1 ! Clear reserved bits | |
807 | srlx %g1, 31, %g1 ! in AFSR | |
808 | ||
809 | /* NOTE: UltraSparc-I/II have high and low UDB error | |
810 | * registers, corresponding to the two UDB units | |
811 | * present on those chips. UltraSparc-IIi only | |
812 | * has a single UDB, called "SDB" in the manual. | |
813 | * For IIi the upper UDB register always reads | |
814 | * as zero so for our purposes things will just | |
815 | * work with the checks below. | |
816 | */ | |
817 | ldxa [%g0] ASI_UDBL_ERROR_R, %g3 ! Read UDB-Low error status | |
818 | andcc %g3, (1 << 8), %g4 ! Check CE bit | |
819 | sllx %g3, (64 - 10), %g3 ! Clear reserved bits | |
820 | srlx %g3, (64 - 10), %g3 ! in UDB-Low error status | |
821 | ||
822 | sllx %g3, (33 + 0), %g3 ! Shift up to encoding area | |
823 | or %g1, %g3, %g1 ! Or it in | |
824 | be,pn %xcc, 1f ! Branch if CE bit was clear | |
825 | nop | |
826 | stxa %g4, [%g0] ASI_UDB_ERROR_W ! Clear CE sticky bit in UDBL | |
827 | membar #Sync ! Synchronize ASI stores | |
828 | 1: mov 0x18, %g5 ! Addr of UDB-High error status | |
829 | ldxa [%g5] ASI_UDBH_ERROR_R, %g3 ! Read it | |
830 | ||
831 | andcc %g3, (1 << 8), %g4 ! Check CE bit | |
832 | sllx %g3, (64 - 10), %g3 ! Clear reserved bits | |
833 | srlx %g3, (64 - 10), %g3 ! in UDB-High error status | |
834 | sllx %g3, (33 + 10), %g3 ! Shift up to encoding area | |
835 | or %g1, %g3, %g1 ! Or it in | |
836 | be,pn %xcc, 1f ! Branch if CE bit was clear | |
837 | nop | |
838 | nop | |
839 | ||
840 | stxa %g4, [%g5] ASI_UDB_ERROR_W ! Clear CE sticky bit in UDBH | |
841 | membar #Sync ! Synchronize ASI stores | |
842 | 1: mov 1, %g5 ! AFSR CE bit is | |
843 | sllx %g5, 20, %g5 ! bit 20 | |
844 | stxa %g5, [%g0] ASI_AFSR ! Clear CE sticky bit in AFSR | |
845 | membar #Sync ! Synchronize ASI stores | |
846 | sllx %g2, (64 - 41), %g2 ! Clear reserved bits | |
847 | srlx %g2, (64 - 41), %g2 ! in latched AFAR | |
848 | ||
849 | andn %g2, 0x0f, %g2 ! Finish resv bit clearing | |
850 | mov %g1, %g4 ! Move AFSR+UDB* into save reg | |
851 | mov %g2, %g5 ! Move AFAR into save reg | |
852 | rdpr %pil, %g2 | |
853 | wrpr %g0, 15, %pil | |
854 | ba,pt %xcc, etrap_irq | |
855 | rd %pc, %g7 | |
856 | mov %l4, %o0 | |
857 | ||
858 | mov %l5, %o1 | |
859 | call cee_log | |
860 | add %sp, PTREGS_OFF, %o2 | |
861 | ba,a,pt %xcc, rtrap_irq | |
862 | ||
863 | /* Capture I/D/E-cache state into per-cpu error scoreboard. | |
864 | * | |
865 | * %g1: (TL>=0) ? 1 : 0 | |
866 | * %g2: scratch | |
867 | * %g3: scratch | |
868 | * %g4: AFSR | |
869 | * %g5: AFAR | |
870 | * %g6: current thread ptr | |
871 | * %g7: scratch | |
872 | */ | |
873 | #define CHEETAH_LOG_ERROR \ | |
874 | /* Put "TL1" software bit into AFSR. */ \ | |
875 | and %g1, 0x1, %g1; \ | |
876 | sllx %g1, 63, %g2; \ | |
877 | or %g4, %g2, %g4; \ | |
878 | /* Get log entry pointer for this cpu at this trap level. */ \ | |
879 | BRANCH_IF_JALAPENO(g2,g3,50f) \ | |
880 | ldxa [%g0] ASI_SAFARI_CONFIG, %g2; \ | |
881 | srlx %g2, 17, %g2; \ | |
882 | ba,pt %xcc, 60f; \ | |
883 | and %g2, 0x3ff, %g2; \ | |
884 | 50: ldxa [%g0] ASI_JBUS_CONFIG, %g2; \ | |
885 | srlx %g2, 17, %g2; \ | |
886 | and %g2, 0x1f, %g2; \ | |
887 | 60: sllx %g2, 9, %g2; \ | |
888 | sethi %hi(cheetah_error_log), %g3; \ | |
889 | ldx [%g3 + %lo(cheetah_error_log)], %g3; \ | |
890 | brz,pn %g3, 80f; \ | |
891 | nop; \ | |
892 | add %g3, %g2, %g3; \ | |
893 | sllx %g1, 8, %g1; \ | |
894 | add %g3, %g1, %g1; \ | |
895 | /* %g1 holds pointer to the top of the logging scoreboard */ \ | |
896 | ldx [%g1 + 0x0], %g7; \ | |
897 | cmp %g7, -1; \ | |
898 | bne,pn %xcc, 80f; \ | |
899 | nop; \ | |
900 | stx %g4, [%g1 + 0x0]; \ | |
901 | stx %g5, [%g1 + 0x8]; \ | |
902 | add %g1, 0x10, %g1; \ | |
903 | /* %g1 now points to D-cache logging area */ \ | |
904 | set 0x3ff8, %g2; /* DC_addr mask */ \ | |
905 | and %g5, %g2, %g2; /* DC_addr bits of AFAR */ \ | |
906 | srlx %g5, 12, %g3; \ | |
907 | or %g3, 1, %g3; /* PHYS tag + valid */ \ | |
908 | 10: ldxa [%g2] ASI_DCACHE_TAG, %g7; \ | |
909 | cmp %g3, %g7; /* TAG match? */ \ | |
910 | bne,pt %xcc, 13f; \ | |
911 | nop; \ | |
912 | /* Yep, what we want, capture state. */ \ | |
913 | stx %g2, [%g1 + 0x20]; \ | |
914 | stx %g7, [%g1 + 0x28]; \ | |
915 | /* A membar Sync is required before and after utag access. */ \ | |
916 | membar #Sync; \ | |
917 | ldxa [%g2] ASI_DCACHE_UTAG, %g7; \ | |
918 | membar #Sync; \ | |
919 | stx %g7, [%g1 + 0x30]; \ | |
920 | ldxa [%g2] ASI_DCACHE_SNOOP_TAG, %g7; \ | |
921 | stx %g7, [%g1 + 0x38]; \ | |
922 | clr %g3; \ | |
923 | 12: ldxa [%g2 + %g3] ASI_DCACHE_DATA, %g7; \ | |
924 | stx %g7, [%g1]; \ | |
925 | add %g3, (1 << 5), %g3; \ | |
926 | cmp %g3, (4 << 5); \ | |
927 | bl,pt %xcc, 12b; \ | |
928 | add %g1, 0x8, %g1; \ | |
929 | ba,pt %xcc, 20f; \ | |
930 | add %g1, 0x20, %g1; \ | |
931 | 13: sethi %hi(1 << 14), %g7; \ | |
932 | add %g2, %g7, %g2; \ | |
933 | srlx %g2, 14, %g7; \ | |
934 | cmp %g7, 4; \ | |
935 | bl,pt %xcc, 10b; \ | |
936 | nop; \ | |
937 | add %g1, 0x40, %g1; \ | |
938 | 20: /* %g1 now points to I-cache logging area */ \ | |
939 | set 0x1fe0, %g2; /* IC_addr mask */ \ | |
940 | and %g5, %g2, %g2; /* IC_addr bits of AFAR */ \ | |
941 | sllx %g2, 1, %g2; /* IC_addr[13:6]==VA[12:5] */ \ | |
942 | srlx %g5, (13 - 8), %g3; /* Make PTAG */ \ | |
943 | andn %g3, 0xff, %g3; /* Mask off undefined bits */ \ | |
944 | 21: ldxa [%g2] ASI_IC_TAG, %g7; \ | |
945 | andn %g7, 0xff, %g7; \ | |
946 | cmp %g3, %g7; \ | |
947 | bne,pt %xcc, 23f; \ | |
948 | nop; \ | |
949 | /* Yep, what we want, capture state. */ \ | |
950 | stx %g2, [%g1 + 0x40]; \ | |
951 | stx %g7, [%g1 + 0x48]; \ | |
952 | add %g2, (1 << 3), %g2; \ | |
953 | ldxa [%g2] ASI_IC_TAG, %g7; \ | |
954 | add %g2, (1 << 3), %g2; \ | |
955 | stx %g7, [%g1 + 0x50]; \ | |
956 | ldxa [%g2] ASI_IC_TAG, %g7; \ | |
957 | add %g2, (1 << 3), %g2; \ | |
958 | stx %g7, [%g1 + 0x60]; \ | |
959 | ldxa [%g2] ASI_IC_TAG, %g7; \ | |
960 | stx %g7, [%g1 + 0x68]; \ | |
961 | sub %g2, (3 << 3), %g2; \ | |
962 | ldxa [%g2] ASI_IC_STAG, %g7; \ | |
963 | stx %g7, [%g1 + 0x58]; \ | |
964 | clr %g3; \ | |
965 | srlx %g2, 2, %g2; \ | |
966 | 22: ldxa [%g2 + %g3] ASI_IC_INSTR, %g7; \ | |
967 | stx %g7, [%g1]; \ | |
968 | add %g3, (1 << 3), %g3; \ | |
969 | cmp %g3, (8 << 3); \ | |
970 | bl,pt %xcc, 22b; \ | |
971 | add %g1, 0x8, %g1; \ | |
972 | ba,pt %xcc, 30f; \ | |
973 | add %g1, 0x30, %g1; \ | |
974 | 23: sethi %hi(1 << 14), %g7; \ | |
975 | add %g2, %g7, %g2; \ | |
976 | srlx %g2, 14, %g7; \ | |
977 | cmp %g7, 4; \ | |
978 | bl,pt %xcc, 21b; \ | |
979 | nop; \ | |
980 | add %g1, 0x70, %g1; \ | |
981 | 30: /* %g1 now points to E-cache logging area */ \ | |
982 | andn %g5, (32 - 1), %g2; /* E-cache subblock */ \ | |
983 | stx %g2, [%g1 + 0x20]; \ | |
984 | ldxa [%g2] ASI_EC_TAG_DATA, %g7; \ | |
985 | stx %g7, [%g1 + 0x28]; \ | |
986 | ldxa [%g2] ASI_EC_R, %g0; \ | |
987 | clr %g3; \ | |
988 | 31: ldxa [%g3] ASI_EC_DATA, %g7; \ | |
989 | stx %g7, [%g1 + %g3]; \ | |
990 | add %g3, 0x8, %g3; \ | |
991 | cmp %g3, 0x20; \ | |
992 | bl,pt %xcc, 31b; \ | |
993 | nop; \ | |
994 | 80: /* DONE */ | |
995 | ||
996 | /* These get patched into the trap table at boot time | |
997 | * once we know we have a cheetah processor. | |
998 | */ | |
999 | .globl cheetah_fecc_trap_vector, cheetah_fecc_trap_vector_tl1 | |
/* Fast-ECC error vectors.  Each copy disables both the D-cache and
 * I-cache in the DCU control register (so corrupt lines are neither
 * referenced nor refetched), then tail-jumps to cheetah_fast_ecc.
 * The jmpl delay slot loads %g1 = 0 for the TL0 vector, 1 for the
 * TL1 vector, so the handler knows the trap level it came from.
 */
1000 | cheetah_fecc_trap_vector: | |
1001 | membar #Sync | |
1002 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
1003 | andn %g1, DCU_DC | DCU_IC, %g1 | |
1004 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
1005 | membar #Sync | |
1006 | sethi %hi(cheetah_fast_ecc), %g2 | |
1007 | jmpl %g2 + %lo(cheetah_fast_ecc), %g0 | |
1008 | mov 0, %g1 | |
1009 | cheetah_fecc_trap_vector_tl1: | |
1010 | membar #Sync | |
1011 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
1012 | andn %g1, DCU_DC | DCU_IC, %g1 | |
1013 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
1014 | membar #Sync | |
1015 | sethi %hi(cheetah_fast_ecc), %g2 | |
1016 | jmpl %g2 + %lo(cheetah_fast_ecc), %g0 | |
1017 | mov 1, %g1 | |
1018 | .globl cheetah_cee_trap_vector, cheetah_cee_trap_vector_tl1 | |
/* Correctable-ECC error vectors.  Only the I-cache is disabled here
 * (the D-cache keeps running, unlike the fast-ECC case), then we
 * jump to cheetah_cee with %g1 = 0 (TL0 entry) or 1 (TL1 entry) set
 * in the jmpl delay slot.
 */
1019 | cheetah_cee_trap_vector: | |
1020 | membar #Sync | |
1021 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
1022 | andn %g1, DCU_IC, %g1 | |
1023 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
1024 | membar #Sync | |
1025 | sethi %hi(cheetah_cee), %g2 | |
1026 | jmpl %g2 + %lo(cheetah_cee), %g0 | |
1027 | mov 0, %g1 | |
1028 | cheetah_cee_trap_vector_tl1: | |
1029 | membar #Sync | |
1030 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
1031 | andn %g1, DCU_IC, %g1 | |
1032 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
1033 | membar #Sync | |
1034 | sethi %hi(cheetah_cee), %g2 | |
1035 | jmpl %g2 + %lo(cheetah_cee), %g0 | |
1036 | mov 1, %g1 | |
1037 | .globl cheetah_deferred_trap_vector, cheetah_deferred_trap_vector_tl1 | |
/* Deferred-error vectors.  Like the fast-ECC vectors: both caches
 * are turned off in the DCU, then we jump to cheetah_deferred_trap
 * with %g1 = 0 (TL0) or 1 (TL1) in the jmpl delay slot.
 */
1038 | cheetah_deferred_trap_vector: | |
1039 | membar #Sync | |
1040 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1; | |
1041 | andn %g1, DCU_DC | DCU_IC, %g1; | |
1042 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG; | |
1043 | membar #Sync; | |
1044 | sethi %hi(cheetah_deferred_trap), %g2 | |
1045 | jmpl %g2 + %lo(cheetah_deferred_trap), %g0 | |
1046 | mov 0, %g1 | |
1047 | cheetah_deferred_trap_vector_tl1: | |
1048 | membar #Sync; | |
1049 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1; | |
1050 | andn %g1, DCU_DC | DCU_IC, %g1; | |
1051 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG; | |
1052 | membar #Sync; | |
1053 | sethi %hi(cheetah_deferred_trap), %g2 | |
1054 | jmpl %g2 + %lo(cheetah_deferred_trap), %g0 | |
1055 | mov 1, %g1 | |
1056 | ||
1057 | /* Cheetah+ specific traps. These are for the new I/D cache parity | |
1058 | * error traps. The first argument to cheetah_plus_parity_handler | |
1059 | * is encoded as follows: | |
1060 | * | |
1061 | * Bit0: 0=dcache,1=icache | |
1062 | * Bit1: 0=recoverable,1=unrecoverable | |
1063 | */ | |
1064 | .globl cheetah_plus_dcpe_trap_vector, cheetah_plus_dcpe_trap_vector_tl1 | |
/* TL0 D-cache parity error vector: jump to the handler below; the
 * trailing nops pad the trap-table slot out to 8 instructions.
 */
1065 | cheetah_plus_dcpe_trap_vector: | |
1066 | membar #Sync | |
1067 | sethi %hi(do_cheetah_plus_data_parity), %g7 | |
1068 | jmpl %g7 + %lo(do_cheetah_plus_data_parity), %g0 | |
1069 | nop | |
1070 | nop | |
1071 | nop | |
1072 | nop | |
1073 | nop | |
1074 | ||
/* Build a full trap frame via etrap, then call
 * cheetah_plus_parity_error(0x0, regs): code 0x0 means recoverable
 * D-cache parity error (see the bit encoding above).  Returns to
 * userspace/kernel via rtrap.
 */
1075 | do_cheetah_plus_data_parity: | |
1076 | ba,pt %xcc, etrap | |
1077 | rd %pc, %g7 | |
1078 | mov 0x0, %o0 | |
1079 | call cheetah_plus_parity_error | |
1080 | add %sp, PTREGS_OFF, %o1 | |
1081 | ba,pt %xcc, rtrap | |
1082 | clr %l6 | |
1083 | ||
/* TL1 D-cache parity error vector: switch to the interrupt-global
 * register set (PSTATE_IG) and jump to do_dcpe_tl1 below, which
 * checks whether the interrupt globals are recoverable.
 */
1084 | cheetah_plus_dcpe_trap_vector_tl1: | |
1085 | membar #Sync | |
1086 | wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate | |
1087 | sethi %hi(do_dcpe_tl1), %g3 | |
1088 | jmpl %g3 + %lo(do_dcpe_tl1), %g0 | |
1089 | nop | |
1090 | nop | |
1091 | nop | |
1092 | nop | |
1093 | ||
1094 | .globl cheetah_plus_icpe_trap_vector, cheetah_plus_icpe_trap_vector_tl1 | |
/* TL0 I-cache parity error vector: jump to the handler below; nops
 * pad the trap-table slot out to 8 instructions.
 */
1095 | cheetah_plus_icpe_trap_vector: | |
1096 | membar #Sync | |
1097 | sethi %hi(do_cheetah_plus_insn_parity), %g7 | |
1098 | jmpl %g7 + %lo(do_cheetah_plus_insn_parity), %g0 | |
1099 | nop | |
1100 | nop | |
1101 | nop | |
1102 | nop | |
1103 | nop | |
1104 | ||
/* Build a trap frame and call cheetah_plus_parity_error(0x1, regs):
 * code 0x1 means recoverable I-cache parity error.
 */
1105 | do_cheetah_plus_insn_parity: | |
1106 | ba,pt %xcc, etrap | |
1107 | rd %pc, %g7 | |
1108 | mov 0x1, %o0 | |
1109 | call cheetah_plus_parity_error | |
1110 | add %sp, PTREGS_OFF, %o1 | |
1111 | ba,pt %xcc, rtrap | |
1112 | clr %l6 | |
1113 | ||
/* TL1 I-cache parity error vector: switch to interrupt globals and
 * jump to do_icpe_tl1 below.
 */
1114 | cheetah_plus_icpe_trap_vector_tl1: | |
1115 | membar #Sync | |
1116 | wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate | |
1117 | sethi %hi(do_icpe_tl1), %g3 | |
1118 | jmpl %g3 + %lo(do_icpe_tl1), %g0 | |
1119 | nop | |
1120 | nop | |
1121 | nop | |
1122 | nop | |
1123 | ||
1124 | /* If we take one of these traps when tl >= 1, then we | |
1125 | * jump to interrupt globals. If some trap level above us | |
1126 | * was also using interrupt globals, we cannot recover. | |
1127 | * We may use all interrupt global registers except %g6. | |
1128 | */ | |
1129 | .globl do_dcpe_tl1, do_icpe_tl1 | |
/* TL>=1 D-cache parity handler.  First walk every trap level from
 * 1 up to the current %tl, reading each level's TSTATE; if any level
 * has TSTATE_IG set (it was already on interrupt globals), the trap
 * is irrecoverable and we take the fatal path.  Otherwise zero out
 * every D-cache line's utag and data words via the diagnostic ASIs
 * and continue at dcpe_icpe_tl1_common.
 */
1130 | do_dcpe_tl1: | |
1131 | rdpr %tl, %g1 ! Save original trap level | |
1132 | mov 1, %g2 ! Setup TSTATE checking loop | |
1133 | sethi %hi(TSTATE_IG), %g3 ! TSTATE mask bit | |
1134 | 1: wrpr %g2, %tl ! Set trap level to check | |
1135 | rdpr %tstate, %g4 ! Read TSTATE for this level | |
1136 | andcc %g4, %g3, %g0 ! Interrupt globals in use? | |
1137 | bne,a,pn %xcc, do_dcpe_tl1_fatal ! Yep, irrecoverable | |
1138 | wrpr %g1, %tl ! Restore original trap level | |
1139 | add %g2, 1, %g2 ! Next trap level | |
1140 | cmp %g2, %g1 ! Hit them all yet? | |
1141 | ble,pt %icc, 1b ! Not yet | |
1142 | nop | |
1143 | wrpr %g1, %tl ! Restore original trap level | |
1144 | do_dcpe_tl1_nonfatal: /* Ok we may use interrupt globals safely. */ | |
1145 | /* Reset D-cache parity */ | |
1146 | sethi %hi(1 << 16), %g1 ! D-cache size | |
1147 | mov (1 << 5), %g2 ! D-cache line size | |
1148 | sub %g1, %g2, %g1 ! Move down 1 cacheline | |
1149 | 1: srl %g1, 14, %g3 ! Compute UTAG | |
1150 | membar #Sync | |
1151 | stxa %g3, [%g1] ASI_DCACHE_UTAG | |
1152 | membar #Sync | |
1153 | sub %g2, 8, %g3 ! 64-bit data word within line | |
1154 | 2: membar #Sync | |
1155 | stxa %g0, [%g1 + %g3] ASI_DCACHE_DATA | |
1156 | membar #Sync | |
1157 | subcc %g3, 8, %g3 ! Next 64-bit data word | |
1158 | bge,pt %icc, 2b | |
1159 | nop | |
1160 | subcc %g1, %g2, %g1 ! Next cacheline | |
1161 | bge,pt %icc, 1b | |
1162 | nop | |
1163 | ba,pt %xcc, dcpe_icpe_tl1_common | |
1164 | nop | |
1165 | ||
/* Irrecoverable: build a TL1 trap frame and report code 0x2
 * (dcache, unrecoverable) to cheetah_plus_parity_error.
 */
1166 | do_dcpe_tl1_fatal: | |
1167 | sethi %hi(1f), %g7 | |
1168 | ba,pt %xcc, etraptl1 | |
1169 | 1: or %g7, %lo(1b), %g7 | |
1170 | mov 0x2, %o0 | |
1171 | call cheetah_plus_parity_error | |
1172 | add %sp, PTREGS_OFF, %o1 | |
1173 | ba,pt %xcc, rtrap | |
1174 | clr %l6 | |
1175 | ||
/* TL>=1 I-cache parity handler.  Same recoverability scan as
 * do_dcpe_tl1 (any level already on interrupt globals => fatal),
 * then the I-cache is flushed by clearing every line's tag via the
 * diagnostic ASI, and we continue at dcpe_icpe_tl1_common.
 */
1176 | do_icpe_tl1: | |
1177 | rdpr %tl, %g1 ! Save original trap level | |
1178 | mov 1, %g2 ! Setup TSTATE checking loop | |
1179 | sethi %hi(TSTATE_IG), %g3 ! TSTATE mask bit | |
1180 | 1: wrpr %g2, %tl ! Set trap level to check | |
1181 | rdpr %tstate, %g4 ! Read TSTATE for this level | |
1182 | andcc %g4, %g3, %g0 ! Interrupt globals in use? | |
1183 | bne,a,pn %xcc, do_icpe_tl1_fatal ! Yep, irrecoverable | |
1184 | wrpr %g1, %tl ! Restore original trap level | |
1185 | add %g2, 1, %g2 ! Next trap level | |
1186 | cmp %g2, %g1 ! Hit them all yet? | |
1187 | ble,pt %icc, 1b ! Not yet | |
1188 | nop | |
1189 | wrpr %g1, %tl ! Restore original trap level | |
1190 | do_icpe_tl1_nonfatal: /* Ok we may use interrupt globals safely. */ | |
1191 | /* Flush I-cache */ | |
1192 | sethi %hi(1 << 15), %g1 ! I-cache size | |
1193 | mov (1 << 5), %g2 ! I-cache line size | |
1194 | sub %g1, %g2, %g1 | |
1195 | 1: or %g1, (2 << 3), %g3 | |
1196 | stxa %g0, [%g3] ASI_IC_TAG | |
1197 | membar #Sync | |
1198 | subcc %g1, %g2, %g1 | |
1199 | bge,pt %icc, 1b | |
1200 | nop | |
1201 | ba,pt %xcc, dcpe_icpe_tl1_common | |
1202 | nop | |
1203 | ||
/* Irrecoverable: build a TL1 trap frame and report code 0x3
 * (icache, unrecoverable) to cheetah_plus_parity_error.
 */
1204 | do_icpe_tl1_fatal: | |
1205 | sethi %hi(1f), %g7 | |
1206 | ba,pt %xcc, etraptl1 | |
1207 | 1: or %g7, %lo(1b), %g7 | |
1208 | mov 0x3, %o0 | |
1209 | call cheetah_plus_parity_error | |
1210 | add %sp, PTREGS_OFF, %o1 | |
1211 | ba,pt %xcc, rtrap | |
1212 | clr %l6 | |
1213 | ||
/* Shared tail of the nonfatal TL1 parity paths: clear every D-cache
 * tag, turn the D- and I-caches back on in the DCU control register,
 * and retry the instruction that trapped.
 */
1214 | dcpe_icpe_tl1_common: | |
1215 | /* Flush D-cache, re-enable D/I caches in DCU and finally | |
1216 | * retry the trapping instruction. | |
1217 | */ | |
1218 | sethi %hi(1 << 16), %g1 ! D-cache size | |
1219 | mov (1 << 5), %g2 ! D-cache line size | |
1220 | sub %g1, %g2, %g1 | |
1221 | 1: stxa %g0, [%g1] ASI_DCACHE_TAG | |
1222 | membar #Sync | |
1223 | subcc %g1, %g2, %g1 | |
1224 | bge,pt %icc, 1b | |
1225 | nop | |
1226 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 | |
1227 | or %g1, (DCU_DC | DCU_IC), %g1 | |
1228 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG | |
1229 | membar #Sync | |
1230 | retry | |
1231 | ||
1232 | /* Cheetah FECC trap handling, we get here from tl{0,1}_fecc | |
1233 | * in the trap table. That code has done a memory barrier | |
1234 | * and has disabled both the I-cache and D-cache in the DCU | |
1235 | * control register. The I-cache is disabled so that we may | |
1236 | * capture the corrupted cache line, and the D-cache is disabled | |
1237 | * because corrupt data may have been placed there and we don't | |
1238 | * want to reference it. | |
1239 | * | |
1240 | * %g1 is one if this trap occurred at %tl >= 1. | |
1241 | * | |
1242 | * Next, we turn off error reporting so that we don't recurse. | |
1243 | */ | |
1244 | .globl cheetah_fast_ecc | |
/* Sequence: mask NCEEN+CEEN error reporting, latch and clear
 * AFSR/AFAR into %g4/%g5, snapshot cache/error state with
 * CHEETAH_LOG_ERROR, then raise PIL to 15, build an irq trap frame
 * and call cheetah_fecc_handler(regs, afsr, afar).
 * NOTE(review): %l4/%l5 appear to carry the saved AFSR/AFAR out of
 * CHEETAH_LOG_ERROR/etrap_irq — confirm against the macro definition
 * earlier in this file.
 */
1245 | cheetah_fast_ecc: | |
1246 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 | |
1247 | andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2 | |
1248 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN | |
1249 | membar #Sync | |
1250 | ||
1251 | /* Fetch and clear AFSR/AFAR */ | |
1252 | ldxa [%g0] ASI_AFSR, %g4 | |
1253 | ldxa [%g0] ASI_AFAR, %g5 | |
1254 | stxa %g4, [%g0] ASI_AFSR | |
1255 | membar #Sync | |
1256 | ||
1257 | CHEETAH_LOG_ERROR | |
1258 | ||
1259 | rdpr %pil, %g2 | |
1260 | wrpr %g0, 15, %pil | |
1261 | ba,pt %xcc, etrap_irq | |
1262 | rd %pc, %g7 | |
1263 | mov %l4, %o1 | |
1264 | mov %l5, %o2 | |
1265 | call cheetah_fecc_handler | |
1266 | add %sp, PTREGS_OFF, %o0 | |
1267 | ba,a,pt %xcc, rtrap_irq | |
1268 | ||
1269 | /* Our caller has disabled I-cache and performed membar Sync. */ | |
1270 | .globl cheetah_cee | |
/* Correctable-ECC handler.  Same shape as cheetah_fast_ecc but only
 * CEEN reporting is masked (the error was correctable), and the C
 * handler is cheetah_cee_handler(regs, afsr, afar).
 */
1271 | cheetah_cee: | |
1272 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 | |
1273 | andn %g2, ESTATE_ERROR_CEEN, %g2 | |
1274 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN | |
1275 | membar #Sync | |
1276 | ||
1277 | /* Fetch and clear AFSR/AFAR */ | |
1278 | ldxa [%g0] ASI_AFSR, %g4 | |
1279 | ldxa [%g0] ASI_AFAR, %g5 | |
1280 | stxa %g4, [%g0] ASI_AFSR | |
1281 | membar #Sync | |
1282 | ||
1283 | CHEETAH_LOG_ERROR | |
1284 | ||
1285 | rdpr %pil, %g2 | |
1286 | wrpr %g0, 15, %pil | |
1287 | ba,pt %xcc, etrap_irq | |
1288 | rd %pc, %g7 | |
1289 | mov %l4, %o1 | |
1290 | mov %l5, %o2 | |
1291 | call cheetah_cee_handler | |
1292 | add %sp, PTREGS_OFF, %o0 | |
1293 | ba,a,pt %xcc, rtrap_irq | |
1294 | ||
1295 | /* Our caller has disabled I-cache+D-cache and performed membar Sync. */ | |
1296 | .globl cheetah_deferred_trap | |
/* Deferred-error handler.  Masks both NCEEN and CEEN reporting,
 * latches and clears AFSR/AFAR, logs cache state, then calls
 * cheetah_deferred_handler(regs, afsr, afar) at PIL 15.
 */
1297 | cheetah_deferred_trap: | |
1298 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 | |
1299 | andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2 | |
1300 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN | |
1301 | membar #Sync | |
1302 | ||
1303 | /* Fetch and clear AFSR/AFAR */ | |
1304 | ldxa [%g0] ASI_AFSR, %g4 | |
1305 | ldxa [%g0] ASI_AFAR, %g5 | |
1306 | stxa %g4, [%g0] ASI_AFSR | |
1307 | membar #Sync | |
1308 | ||
1309 | CHEETAH_LOG_ERROR | |
1310 | ||
1311 | rdpr %pil, %g2 | |
1312 | wrpr %g0, 15, %pil | |
1313 | ba,pt %xcc, etrap_irq | |
1314 | rd %pc, %g7 | |
1315 | mov %l4, %o1 | |
1316 | mov %l5, %o2 | |
1317 | call cheetah_deferred_handler | |
1318 | add %sp, PTREGS_OFF, %o0 | |
1319 | ba,a,pt %xcc, rtrap_irq | |
1320 | ||
1321 | .globl __do_privact | |
/* Privileged-action trap: clear the DMMU SFSR FaultValid bit, build
 * a trap frame (returning to label 109 after etrap), and hand off to
 * the C handler do_privact(regs).
 */
1322 | __do_privact: | |
1323 | mov TLB_SFSR, %g3 | |
1324 | stxa %g0, [%g3] ASI_DMMU ! Clear FaultValid bit | |
1325 | membar #Sync | |
1326 | sethi %hi(109f), %g7 | |
1327 | ba,pt %xcc, etrap | |
1328 | 109: or %g7, %lo(109b), %g7 | |
1329 | call do_privact | |
1330 | add %sp, PTREGS_OFF, %o0 | |
1331 | ba,pt %xcc, rtrap | |
1332 | clr %l6 | |
1333 | ||
1334 | .globl do_mna | |
/* Memory-address-not-aligned trap.  The %tl > 1 comparison is done
 * first; its condition codes are consumed by the bgu below, after
 * the SFAR/SFSR loads (which winfix_mna needs in %g4/%g5).  If the
 * fault happened at %tl > 1 we go to the window fixup path,
 * otherwise we etrap and call mem_address_unaligned(regs, sfar,
 * sfsr).
 */
1335 | do_mna: | |
1336 | rdpr %tl, %g3 | |
1337 | cmp %g3, 1 | |
1338 | ||
1339 | /* Setup %g4/%g5 now as they are used in the | |
1340 | * winfixup code. | |
1341 | */ | |
1342 | mov TLB_SFSR, %g3 | |
1343 | mov DMMU_SFAR, %g4 | |
1344 | ldxa [%g4] ASI_DMMU, %g4 | |
1345 | ldxa [%g3] ASI_DMMU, %g5 | |
1346 | stxa %g0, [%g3] ASI_DMMU ! Clear FaultValid bit | |
1347 | membar #Sync | |
1348 | bgu,pn %icc, winfix_mna | |
1349 | rdpr %tpc, %g3 | |
1350 | ||
1351 | 1: sethi %hi(109f), %g7 | |
1352 | ba,pt %xcc, etrap | |
1353 | 109: or %g7, %lo(109b), %g7 | |
1354 | mov %l4, %o1 | |
1355 | mov %l5, %o2 | |
1356 | call mem_address_unaligned | |
1357 | add %sp, PTREGS_OFF, %o0 | |
1358 | ba,pt %xcc, rtrap | |
1359 | clr %l6 | |
1360 | ||
1361 | .globl do_lddfmna | |
/* Unaligned LDDF (FP double load) trap: read SFSR into %g5, clear
 * FaultValid, read the fault address (SFAR) into %g4, then etrap and
 * call handle_lddfmna(regs, sfar, sfsr).
 */
1362 | do_lddfmna: | |
1363 | sethi %hi(109f), %g7 | |
1364 | mov TLB_SFSR, %g4 | |
1365 | ldxa [%g4] ASI_DMMU, %g5 | |
1366 | stxa %g0, [%g4] ASI_DMMU ! Clear FaultValid bit | |
1367 | membar #Sync | |
1368 | mov DMMU_SFAR, %g4 | |
1369 | ldxa [%g4] ASI_DMMU, %g4 | |
1370 | ba,pt %xcc, etrap | |
1371 | 109: or %g7, %lo(109b), %g7 | |
1372 | mov %l4, %o1 | |
1373 | mov %l5, %o2 | |
1374 | call handle_lddfmna | |
1375 | add %sp, PTREGS_OFF, %o0 | |
1376 | ba,pt %xcc, rtrap | |
1377 | clr %l6 | |
1378 | ||
1379 | .globl do_stdfmna | |
/* Unaligned STDF (FP double store) trap: identical shape to
 * do_lddfmna above, but dispatches to handle_stdfmna(regs, sfar,
 * sfsr).
 */
1380 | do_stdfmna: | |
1381 | sethi %hi(109f), %g7 | |
1382 | mov TLB_SFSR, %g4 | |
1383 | ldxa [%g4] ASI_DMMU, %g5 | |
1384 | stxa %g0, [%g4] ASI_DMMU ! Clear FaultValid bit | |
1385 | membar #Sync | |
1386 | mov DMMU_SFAR, %g4 | |
1387 | ldxa [%g4] ASI_DMMU, %g4 | |
1388 | ba,pt %xcc, etrap | |
1389 | 109: or %g7, %lo(109b), %g7 | |
1390 | mov %l4, %o1 | |
1391 | mov %l5, %o2 | |
1392 | call handle_stdfmna | |
1393 | add %sp, PTREGS_OFF, %o0 | |
1394 | ba,pt %xcc, rtrap | |
1395 | clr %l6 | |
1396 | ||
1397 | .globl breakpoint_trap | |
/* Breakpoint trap: trap frame already built by the caller; just
 * dispatch to sparc_breakpoint(regs) and return through rtrap.
 */
1398 | breakpoint_trap: | |
1399 | call sparc_breakpoint | |
1400 | add %sp, PTREGS_OFF, %o0 | |
1401 | ba,pt %xcc, rtrap | |
1402 | nop | |
1403 | ||
1404 | #if defined(CONFIG_SUNOS_EMUL) || defined(CONFIG_SOLARIS_EMUL) || \ | |
1405 | defined(CONFIG_SOLARIS_EMUL_MODULE) | |
1406 | /* SunOS uses syscall zero as the 'indirect syscall' it looks | |
1407 | * like indir_syscall(scall_num, arg0, arg1, arg2...); etc. | |
1408 | * This is complete brain damage. | |
1409 | */ | |
1410 | .globl sunos_indir | |
/* Validate the requested syscall number, pick the entry from
 * sunos_sys_table (or sunos_nosys if out of range), shift every
 * argument down one slot, and tail-call the target: restoring the
 * saved %o7 in the call's delay slot makes the target return
 * directly to sunos_indir's own caller.
 */
1411 | sunos_indir: | |
1412 | srl %o0, 0, %o0 | |
1413 | mov %o7, %l4 | |
1414 | cmp %o0, NR_SYSCALLS | |
1415 | blu,a,pt %icc, 1f | |
1416 | sll %o0, 0x2, %o0 | |
1417 | sethi %hi(sunos_nosys), %l6 | |
1418 | b,pt %xcc, 2f | |
1419 | or %l6, %lo(sunos_nosys), %l6 | |
1420 | 1: sethi %hi(sunos_sys_table), %l7 | |
1421 | or %l7, %lo(sunos_sys_table), %l7 | |
1422 | lduw [%l7 + %o0], %l6 | |
1423 | 2: mov %o1, %o0 | |
1424 | mov %o2, %o1 | |
1425 | mov %o3, %o2 | |
1426 | mov %o4, %o3 | |
1427 | mov %o5, %o4 | |
1428 | call %l6 | |
1429 | mov %l4, %o7 | |
1430 | ||
1431 | .globl sunos_getpid | |
/* SunOS getpid() returns pid in %o0 and ppid in %o1: the ppid is
 * stored into the saved %i1 slot, the pid into %i0.
 */
1432 | sunos_getpid: | |
1433 | call sys_getppid | |
1434 | nop | |
1435 | call sys_getpid | |
1436 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] | |
1437 | b,pt %xcc, ret_sys_call | |
1438 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
1439 | ||
1440 | /* SunOS getuid() returns uid in %o0 and euid in %o1 */ | |
1441 | .globl sunos_getuid | |
1442 | sunos_getuid: | |
1443 | call sys32_geteuid16 | |
1444 | nop | |
1445 | call sys32_getuid16 | |
1446 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] | |
1447 | b,pt %xcc, ret_sys_call | |
1448 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
1449 | ||
1450 | /* SunOS getgid() returns gid in %o0 and egid in %o1 */ | |
1451 | .globl sunos_getgid | |
1452 | sunos_getgid: | |
1453 | call sys32_getegid16 | |
1454 | nop | |
1455 | call sys32_getgid16 | |
1456 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] | |
1457 | b,pt %xcc, ret_sys_call | |
1458 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
1459 | #endif | |
1460 | ||
1461 | /* SunOS's execv() call only specifies the argv argument, the | |
1462 | * environment settings are the same as the calling processes. | |
1463 | */ | |
/* BUGFIX: the two .globl directives were swapped.  sys_execve must
 * be exported unconditionally (it is wired into the syscall table on
 * every build), while sunos_execv only exists under CONFIG_COMPAT.
 * The original code declared .globl sunos_execv before sys_execve:
 * and .globl sys_execve inside the #ifdef, so !CONFIG_COMPAT builds
 * got a non-global sys_execve and an undefined global sunos_execv.
 */
1464 | .globl sys_execve | |
1465 | sys_execve: | |
1466 | sethi %hi(sparc_execve), %g1 | |
1467 | ba,pt %xcc, execve_merge | |
1468 | or %g1, %lo(sparc_execve), %g1 | |
1469 | #ifdef CONFIG_COMPAT | |
1470 | .globl sunos_execv | |
/* SunOS execv(): zero the saved envp argument slot, then fall
 * through into the 32-bit execve path.
 */
1471 | sunos_execv: | |
1472 | stx %g0, [%sp + PTREGS_OFF + PT_V9_I2] | |
1473 | .globl sys32_execve | |
1474 | sys32_execve: | |
1475 | sethi %hi(sparc32_execve), %g1 | |
1476 | or %g1, %lo(sparc32_execve), %g1 | |
1477 | #endif | |
/* Common tail: flush register windows to the stack, then jump to
 * the C implementation selected in %g1 with %o0 = pt_regs pointer.
 */
1478 | execve_merge: | |
1479 | flushw | |
1480 | jmpl %g1, %g0 | |
1481 | add %sp, PTREGS_OFF, %o0 | |
1482 | ||
1483 | .globl sys_pipe, sys_sigpause, sys_nis_syscall | |
1484 | .globl sys_sigsuspend, sys_rt_sigsuspend | |
1485 | .globl sys_rt_sigreturn | |
1486 | .globl sys_ptrace | |
1487 | .globl sys_sigaltstack | |
/* Thin syscall entry stubs.  The first group simply tail-branches
 * to the C implementation with the pt_regs pointer (or biased frame
 * pointer) set up in the delay slot.  The second group uses the
 * "add %o7, 1f-.-4, %o7" trick: it patches the return address so
 * the called C function returns to label 1: at the bottom, which
 * checks _TIF_SYSCALL_TRACE before going through rtrap instead of
 * the normal ret_sys_call path.
 */
1488 | .align 32 | |
1489 | sys_pipe: ba,pt %xcc, sparc_pipe | |
1490 | add %sp, PTREGS_OFF, %o0 | |
1491 | sys_nis_syscall:ba,pt %xcc, c_sys_nis_syscall | |
1492 | add %sp, PTREGS_OFF, %o0 | |
1493 | sys_memory_ordering: | |
1494 | ba,pt %xcc, sparc_memory_ordering | |
1495 | add %sp, PTREGS_OFF, %o1 | |
1496 | sys_sigaltstack:ba,pt %xcc, do_sigaltstack | |
1497 | add %i6, STACK_BIAS, %o2 | |
1498 | #ifdef CONFIG_COMPAT | |
1499 | .globl sys32_sigstack | |
1500 | sys32_sigstack: ba,pt %xcc, do_sys32_sigstack | |
1501 | mov %i6, %o2 | |
1502 | .globl sys32_sigaltstack | |
1503 | sys32_sigaltstack: | |
1504 | ba,pt %xcc, do_sys32_sigaltstack | |
1505 | mov %i6, %o2 | |
1506 | #endif | |
1507 | .align 32 | |
1508 | sys_sigsuspend: add %sp, PTREGS_OFF, %o0 | |
1509 | call do_sigsuspend | |
1510 | add %o7, 1f-.-4, %o7 | |
1511 | nop | |
1512 | sys_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */ | |
1513 | add %sp, PTREGS_OFF, %o2 | |
1514 | call do_rt_sigsuspend | |
1515 | add %o7, 1f-.-4, %o7 | |
1516 | nop | |
1517 | #ifdef CONFIG_COMPAT | |
1518 | .globl sys32_rt_sigsuspend | |
1519 | sys32_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */ | |
1520 | srl %o0, 0, %o0 | |
1521 | add %sp, PTREGS_OFF, %o2 | |
1522 | call do_rt_sigsuspend32 | |
1523 | add %o7, 1f-.-4, %o7 | |
1524 | #endif | |
1525 | /* NOTE: %o0 has a correct value already */ | |
1526 | sys_sigpause: add %sp, PTREGS_OFF, %o1 | |
1527 | call do_sigpause | |
1528 | add %o7, 1f-.-4, %o7 | |
1529 | nop | |
1530 | #ifdef CONFIG_COMPAT | |
1531 | .globl sys32_sigreturn | |
1532 | sys32_sigreturn: | |
1533 | add %sp, PTREGS_OFF, %o0 | |
1534 | call do_sigreturn32 | |
1535 | add %o7, 1f-.-4, %o7 | |
1536 | nop | |
1537 | #endif | |
1538 | sys_rt_sigreturn: | |
1539 | add %sp, PTREGS_OFF, %o0 | |
1540 | call do_rt_sigreturn | |
1541 | add %o7, 1f-.-4, %o7 | |
1542 | nop | |
1543 | #ifdef CONFIG_COMPAT | |
1544 | .globl sys32_rt_sigreturn | |
1545 | sys32_rt_sigreturn: | |
1546 | add %sp, PTREGS_OFF, %o0 | |
1547 | call do_rt_sigreturn32 | |
1548 | add %o7, 1f-.-4, %o7 | |
1549 | nop | |
1550 | #endif | |
1551 | sys_ptrace: add %sp, PTREGS_OFF, %o0 | |
1552 | call do_ptrace | |
1553 | add %o7, 1f-.-4, %o7 | |
1554 | nop | |
1555 | .align 32 | |
/* Common return point for the %o7-patched calls above: run
 * syscall_trace if _TIF_SYSCALL_TRACE is set, then exit via rtrap.
 */
1556 | 1: ldx [%curptr + TI_FLAGS], %l5 | |
1557 | andcc %l5, _TIF_SYSCALL_TRACE, %g0 | |
1558 | be,pt %icc, rtrap | |
1559 | clr %l6 | |
1560 | call syscall_trace | |
1561 | nop | |
1562 | ||
1563 | ba,pt %xcc, rtrap | |
1564 | clr %l6 | |
1565 | ||
1566 | /* This is how fork() was meant to be done, 8 instruction entry. | |
1567 | * | |
1568 | * I questioned the following code briefly, let me clear things | |
1569 | * up so you must not reason on it like I did. | |
1570 | * | |
1571 | * Know the fork_kpsr etc. we use in the sparc32 port? We don't | |
1572 | * need it here because the only piece of window state we copy to | |
1573 | * the child is the CWP register. Even if the parent sleeps, | |
1574 | * we are safe because we stuck it into pt_regs of the parent | |
1575 | * so it will not change. | |
1576 | * | |
1577 | * XXX This raises the question, whether we can do the same on | |
1578 | * XXX sparc32 to get rid of fork_kpsr _and_ fork_kwim. The | |
1579 | * XXX answer is yes. We stick fork_kpsr in UREG_G0 and | |
1580 | * XXX fork_kwim in UREG_G1 (global registers are considered | |
1581 | * XXX volatile across a system call in the sparc ABI I think | |
1582 | * XXX if it isn't we can use regs->y instead, anyone who depends | |
1583 | * XXX upon the Y register being preserved across a fork deserves | |
1584 | * XXX to lose). | |
1585 | * | |
1586 | * In fact we should take advantage of that fact for other things | |
1587 | * during system calls... | |
1588 | */ | |
1589 | .globl sys_fork, sys_vfork, sys_clone, sparc_exit | |
1590 | .globl ret_from_syscall | |
1591 | .align 32 | |
/* vfork/fork funnel into sys_clone with synthesized flags:
 * 0x4000 | 0x0100 here matches CLONE_VFORK | CLONE_VM (hard-coded
 * values — TODO confirm against <linux/sched.h>).  sys_clone:
 * %o1 == 0 means "use parent's stack" (movrz substitutes %fp),
 * %o3 = 0, %o2 = pt_regs, then off to sparc_do_fork.
 */
1592 | sys_vfork: /* Under Linux, vfork and fork are just special cases of clone. */ | |
1593 | sethi %hi(0x4000 | 0x0100 | SIGCHLD), %o0 | |
1594 | or %o0, %lo(0x4000 | 0x0100 | SIGCHLD), %o0 | |
1595 | ba,pt %xcc, sys_clone | |
1596 | sys_fork: clr %o1 | |
1597 | mov SIGCHLD, %o0 | |
1598 | sys_clone: flushw | |
1599 | movrz %o1, %fp, %o1 | |
1600 | mov 0, %o3 | |
1601 | ba,pt %xcc, sparc_do_fork | |
1602 | add %sp, PTREGS_OFF, %o2 | |
/* New child's first kernel-mode code after switch_to: clear the
 * NEWCHILD flag, run schedule_tail(), reload the performance
 * counter register if _TIF_PERFCTR is set, then join the normal
 * syscall return path with the saved %i0 as the return value.
 */
1603 | ret_from_syscall: | |
1604 | /* Clear SPARC_FLAG_NEWCHILD, switch_to leaves thread.flags in | |
1605 | * %o7 for us. Check performance counter stuff too. | |
1606 | */ | |
1607 | andn %o7, _TIF_NEWCHILD, %l0 | |
1608 | stx %l0, [%g6 + TI_FLAGS] | |
1609 | call schedule_tail | |
1610 | mov %g7, %o0 | |
1611 | andcc %l0, _TIF_PERFCTR, %g0 | |
1612 | be,pt %icc, 1f | |
1613 | nop | |
1614 | ldx [%g6 + TI_PCR], %o7 | |
1615 | wr %g0, %o7, %pcr | |
1616 | ||
1617 | /* Blackbird errata workaround. See commentary in | |
1618 | * smp.c:smp_percpu_timer_interrupt() for more | |
1619 | * information. | |
1620 | */ | |
1621 | ba,pt %xcc, 99f | |
1622 | nop | |
1623 | .align 64 | |
1624 | 99: wr %g0, %g0, %pic | |
1625 | rd %pic, %g0 | |
1626 | ||
1627 | 1: b,pt %xcc, ret_sys_call | |
1628 | ldx [%sp + PTREGS_OFF + PT_V9_I0], %o0 | |
/* exit(2) entry: with interrupts disabled, fold %otherwin into
 * %cansave and clear the saved-window count so no user windows are
 * flushed for the dying task, then call sys_exit.
 */
1629 | sparc_exit: wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV), %pstate | |
1630 | rdpr %otherwin, %g1 | |
1631 | rdpr %cansave, %g3 | |
1632 | add %g3, %g1, %g3 | |
1633 | wrpr %g3, 0x0, %cansave | |
1634 | wrpr %g0, 0x0, %otherwin | |
1635 | wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV | PSTATE_IE), %pstate | |
1636 | ba,pt %xcc, sys_exit | |
1637 | stb %g0, [%g6 + TI_WSAVED] | |
1638 | ||
/* Out-of-range syscall number: substitute sys_ni_syscall as the
 * handler and rejoin the native dispatch path at label 4.
 */
1639 | linux_sparc_ni_syscall: | |
1640 | sethi %hi(sys_ni_syscall), %l7 | |
1641 | b,pt %xcc, 4f | |
1642 | or %l7, %lo(sys_ni_syscall), %l7 | |
1643 | ||
/* Syscall-entry tracing for 32-bit tasks: after syscall_trace, the
 * (possibly modified) args are re-read from the %i registers and
 * zero-extended before rejoining the dispatch at label 2.
 */
1644 | linux_syscall_trace32: | |
1645 | call syscall_trace | |
1646 | nop | |
1647 | srl %i0, 0, %o0 | |
1648 | mov %i4, %o4 | |
1649 | srl %i1, 0, %o1 | |
1650 | srl %i2, 0, %o2 | |
1651 | b,pt %xcc, 2f | |
1652 | srl %i3, 0, %o3 | |
1653 | ||
/* Syscall-entry tracing for 64-bit tasks: same as above but the
 * arguments are copied without truncation.
 */
1654 | linux_syscall_trace: | |
1655 | call syscall_trace | |
1656 | nop | |
1657 | mov %i0, %o0 | |
1658 | mov %i1, %o1 | |
1659 | mov %i2, %o2 | |
1660 | mov %i3, %o3 | |
1661 | b,pt %xcc, 2f | |
1662 | mov %i4, %o4 | |
1663 | ||
1664 | ||
1665 | /* Linux 32-bit and SunOS system calls enter here... */ | |
/* 32-bit syscall dispatch: bounds-check %g1 against NR_SYSCALLS,
 * index the table at %l7 (entries are 4-byte words) and zero-extend
 * every argument with "srl reg, 0, reg" since compat tasks pass
 * 32-bit values.  %l5 preserves the original first argument for
 * restart handling.  The per-line ! comments are UltraSPARC
 * pipeline-grouping annotations, not semantics.
 */
1666 | .align 32 | |
1667 | .globl linux_sparc_syscall32 | |
1668 | linux_sparc_syscall32: | |
1669 | /* Direct access to user regs, much faster. */ | |
1670 | cmp %g1, NR_SYSCALLS ! IEU1 Group | |
1671 | bgeu,pn %xcc, linux_sparc_ni_syscall ! CTI | |
1672 | srl %i0, 0, %o0 ! IEU0 | |
1673 | sll %g1, 2, %l4 ! IEU0 Group | |
1674 | #ifdef SYSCALL_TRACING | |
1675 | call syscall_trace_entry | |
1676 | add %sp, PTREGS_OFF, %o0 | |
1677 | srl %i0, 0, %o0 | |
1678 | #endif | |
1679 | srl %i4, 0, %o4 ! IEU1 | |
1680 | lduw [%l7 + %l4], %l7 ! Load | |
1681 | srl %i1, 0, %o1 ! IEU0 Group | |
1682 | ldx [%curptr + TI_FLAGS], %l0 ! Load | |
1683 | ||
1684 | srl %i5, 0, %o5 ! IEU1 | |
1685 | srl %i2, 0, %o2 ! IEU0 Group | |
1686 | andcc %l0, _TIF_SYSCALL_TRACE, %g0 ! IEU0 Group | |
1687 | bne,pn %icc, linux_syscall_trace32 ! CTI | |
1688 | mov %i0, %l5 ! IEU1 | |
1689 | call %l7 ! CTI Group brk forced | |
1690 | srl %i3, 0, %o3 ! IEU0 | |
1691 | ba,a,pt %xcc, 3f | |
1692 | ||
1693 | /* Linux native and SunOS system calls enter here... */ | |
/* 64-bit syscall dispatch: same structure as the 32-bit path above
 * but arguments are passed through unmodified.  Label 4 is the
 * rejoin point used by linux_sparc_ni_syscall, label 2 by the trace
 * stubs; label 3 (below) stores the return value.
 */
1694 | .align 32 | |
1695 | .globl linux_sparc_syscall, ret_sys_call | |
1696 | linux_sparc_syscall: | |
1697 | /* Direct access to user regs, much faster. */ | |
1698 | cmp %g1, NR_SYSCALLS ! IEU1 Group | |
1699 | bgeu,pn %xcc, linux_sparc_ni_syscall ! CTI | |
1700 | mov %i0, %o0 ! IEU0 | |
1701 | sll %g1, 2, %l4 ! IEU0 Group | |
1702 | #ifdef SYSCALL_TRACING | |
1703 | call syscall_trace_entry | |
1704 | add %sp, PTREGS_OFF, %o0 | |
1705 | mov %i0, %o0 | |
1706 | #endif | |
1707 | mov %i1, %o1 ! IEU1 | |
1708 | lduw [%l7 + %l4], %l7 ! Load | |
1709 | 4: mov %i2, %o2 ! IEU0 Group | |
1710 | ldx [%curptr + TI_FLAGS], %l0 ! Load | |
1711 | ||
1712 | mov %i3, %o3 ! IEU1 | |
1713 | mov %i4, %o4 ! IEU0 Group | |
1714 | andcc %l0, _TIF_SYSCALL_TRACE, %g0 ! IEU1 Group+1 bubble | |
1715 | bne,pn %icc, linux_syscall_trace ! CTI Group | |
1716 | mov %i0, %l5 ! IEU0 | |
1717 | 2: call %l7 ! CTI Group brk forced | |
1718 | mov %i5, %o5 ! IEU0 | |
1719 | nop | |
1720 | ||
/* Syscall return path.  %o0 holds the handler's return value.
 * Success clears the carry bit in the saved TSTATE and stores
 * pc=old npc, npc=old npc+4; failure (return value in the -errno
 * range above -ERESTART_RESTARTBLOCK unless force_successful_
 * syscall_return() set _TIF_SYSCALL_SUCCESS) negates the value to
 * abs(errno), stores it in the saved %i0 and sets the carry bit.
 * %l6 carries the trace flag into linux_syscall_trace2/rtrap.
 */
1721 | 3: stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
1722 | ret_sys_call: | |
1723 | #ifdef SYSCALL_TRACING | |
1724 | mov %o0, %o1 | |
1725 | call syscall_trace_exit | |
1726 | add %sp, PTREGS_OFF, %o0 | |
1727 | mov %o1, %o0 | |
1728 | #endif | |
1729 | ldx [%sp + PTREGS_OFF + PT_V9_TSTATE], %g3 | |
1730 | ldx [%sp + PTREGS_OFF + PT_V9_TNPC], %l1 ! pc = npc | |
1731 | sra %o0, 0, %o0 | |
1732 | mov %ulo(TSTATE_XCARRY | TSTATE_ICARRY), %g2 | |
1733 | sllx %g2, 32, %g2 | |
1734 | ||
1735 | /* Check if force_successful_syscall_return() | |
1736 | * was invoked. | |
1737 | */ | |
1738 | ldx [%curptr + TI_FLAGS], %l0 | |
1739 | andcc %l0, _TIF_SYSCALL_SUCCESS, %g0 | |
1740 | be,pt %icc, 1f | |
1741 | andn %l0, _TIF_SYSCALL_SUCCESS, %l0 | |
1742 | ba,pt %xcc, 80f | |
1743 | stx %l0, [%curptr + TI_FLAGS] | |
1744 | ||
1745 | 1: | |
1746 | cmp %o0, -ERESTART_RESTARTBLOCK | |
1747 | bgeu,pn %xcc, 1f | |
1748 | andcc %l0, _TIF_SYSCALL_TRACE, %l6 | |
1749 | 80: | |
1750 | /* System call success, clear Carry condition code. */ | |
1751 | andn %g3, %g2, %g3 | |
1752 | stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE] | |
1753 | bne,pn %icc, linux_syscall_trace2 | |
1754 | add %l1, 0x4, %l2 ! npc = npc+4 | |
1755 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] | |
1756 | ba,pt %xcc, rtrap_clr_l6 | |
1757 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] | |
1758 | ||
1759 | 1: | |
1760 | /* System call failure, set Carry condition code. | |
1761 | * Also, get abs(errno) to return to the process. | |
1762 | */ | |
1763 | andcc %l0, _TIF_SYSCALL_TRACE, %l6 | |
1764 | sub %g0, %o0, %o0 | |
1765 | or %g3, %g2, %g3 | |
1766 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] | |
1767 | mov 1, %l6 | |
1768 | stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE] | |
1769 | bne,pn %icc, linux_syscall_trace2 | |
1770 | add %l1, 0x4, %l2 ! npc = npc+4 | |
1771 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] | |
1772 | ||
1773 | b,pt %xcc, rtrap | |
1774 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] | |
/* Syscall-exit tracing: call syscall_trace, then store the advanced
 * pc/npc and leave through rtrap.
 */
1775 | linux_syscall_trace2: | |
1776 | call syscall_trace | |
1777 | nop | |
1778 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] | |
1779 | ba,pt %xcc, rtrap | |
1780 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] | |
1781 | ||
1782 | .align 32 | |
1783 | .globl __flushw_user | |
/* Flush all user register windows to the stack: execute one save
 * per outstanding %otherwin window (counting them in %g2), then the
 * same number of restores to get back to the original window.  If
 * %otherwin is already zero there is nothing to do.
 */
1784 | __flushw_user: | |
1785 | rdpr %otherwin, %g1 | |
1786 | brz,pn %g1, 2f | |
1787 | clr %g2 | |
1788 | 1: save %sp, -128, %sp | |
1789 | rdpr %otherwin, %g1 | |
1790 | brnz,pt %g1, 1b | |
1791 | add %g2, 1, %g2 | |
1792 | 1: sub %g2, 1, %g2 | |
1793 | brnz,pt %g2, 1b | |
1794 | restore %g0, %g0, %g0 | |
1795 | 2: retl | |
1796 | nop |