Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* $Id: entry.S,v 1.144 2002/02/09 19:49:30 davem Exp $ |
2 | * arch/sparc64/kernel/entry.S: Sparc64 trap low-level entry points. | |
3 | * | |
4 | * Copyright (C) 1995,1997 David S. Miller (davem@caip.rutgers.edu) | |
5 | * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) | |
6 | * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx) | |
7 | * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | |
8 | */ | |
9 | ||
10 | #include <linux/config.h> | |
11 | #include <linux/errno.h> | |
12 | ||
13 | #include <asm/head.h> | |
14 | #include <asm/asi.h> | |
15 | #include <asm/smp.h> | |
16 | #include <asm/ptrace.h> | |
17 | #include <asm/page.h> | |
18 | #include <asm/signal.h> | |
19 | #include <asm/pgtable.h> | |
20 | #include <asm/processor.h> | |
21 | #include <asm/visasm.h> | |
22 | #include <asm/estate.h> | |
23 | #include <asm/auxio.h> | |
24 | ||
1da177e4 LT |
25 | #define curptr g6 |
26 | ||
27 | #define NR_SYSCALLS 284 /* Each OS is different... */ | |
28 | ||
29 | .text | |
30 | .align 32 | |
31 | ||
32 | .globl sparc64_vpte_patchme1 | |
33 | .globl sparc64_vpte_patchme2 | |
34 | /* | |
35 | * On a second level vpte miss, check whether the original fault is to the OBP | |
36 | * range (note that this is only possible for instruction miss, data misses to | |
37 | * obp range do not use vpte). If so, go back directly to the faulting address. | |
38 | * This is because we want to read the tpc, otherwise we have no way of knowing | |
39 | * the 8k aligned faulting address if we are using >8k kernel pagesize. This | |
40 | * also ensures no vpte range addresses are dropped into tlb while obp is | |
41 | * executing (see inherit_locked_prom_mappings() rant). | |
42 | */ | |
43 | sparc64_vpte_nucleus: | |
44 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
45 | mov 0xf, %g5 | |
46 | sllx %g5, 28, %g5 | |
47 | ||
48 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
49 | cmp %g4, %g5 | |
50 | blu,pn %xcc, sparc64_vpte_patchme1 | |
51 | mov 0x1, %g5 | |
52 | ||
53 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
54 | sllx %g5, 32, %g5 | |
55 | ||
56 | /* Is addr < HI_OBP_ADDRESS? */ | |
57 | cmp %g4, %g5 | |
58 | blu,pn %xcc, obp_iaddr_patch | |
59 | nop | |
60 | ||
61 | /* These two instructions are patched by paging_init(). */ | |
62 | sparc64_vpte_patchme1: | |
63 | sethi %hi(0), %g5 | |
64 | sparc64_vpte_patchme2: | |
65 | or %g5, %lo(0), %g5 | |
66 | ||
67 | /* With kernel PGD in %g5, branch back into dtlb_backend. */ | |
68 | ba,pt %xcc, sparc64_kpte_continue | |
69 | andn %g1, 0x3, %g1 /* Finish PMD offset adjustment. */ | |
70 | ||
71 | vpte_noent: | |
72 | /* Restore previous TAG_ACCESS, %g5 is zero, and we will | |
73 | * skip over the trap instruction so that the top level | |
74 | * TLB miss handler will think this %g5 value is just an | |
75 | * invalid PTE, thus branching to full fault processing. | |
76 | */ | |
77 | mov TLB_SFSR, %g1 | |
78 | stxa %g4, [%g1 + %g1] ASI_DMMU | |
79 | done | |
80 | ||
81 | .globl obp_iaddr_patch | |
82 | obp_iaddr_patch: | |
83 | /* These two instructions patched by inherit_prom_mappings(). */ | |
84 | sethi %hi(0), %g5 | |
85 | or %g5, %lo(0), %g5 | |
86 | ||
87 | /* Behave as if we are at TL0. */ | |
88 | wrpr %g0, 1, %tl | |
89 | rdpr %tpc, %g4 /* Find original faulting iaddr */ | |
90 | srlx %g4, 13, %g4 /* Throw out context bits */ | |
91 | sllx %g4, 13, %g4 /* g4 has vpn + ctx0 now */ | |
92 | ||
93 | /* Restore previous TAG_ACCESS. */ | |
94 | mov TLB_SFSR, %g1 | |
95 | stxa %g4, [%g1 + %g1] ASI_IMMU | |
96 | ||
97 | /* Get PMD offset. */ | |
98 | srlx %g4, 23, %g6 | |
99 | and %g6, 0x7ff, %g6 | |
100 | sllx %g6, 2, %g6 | |
101 | ||
102 | /* Load PMD, is it valid? */ | |
103 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
104 | brz,pn %g5, longpath | |
105 | sllx %g5, 11, %g5 | |
106 | ||
107 | /* Get PTE offset. */ | |
108 | srlx %g4, 13, %g6 | |
109 | and %g6, 0x3ff, %g6 | |
110 | sllx %g6, 3, %g6 | |
111 | ||
112 | /* Load PTE. */ | |
113 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
114 | brgez,pn %g5, longpath | |
115 | nop | |
116 | ||
117 | /* TLB load and return from trap. */ | |
118 | stxa %g5, [%g0] ASI_ITLB_DATA_IN | |
119 | retry | |
120 | ||
121 | .globl obp_daddr_patch | |
122 | obp_daddr_patch: | |
123 | /* These two instructions patched by inherit_prom_mappings(). */ | |
124 | sethi %hi(0), %g5 | |
125 | or %g5, %lo(0), %g5 | |
126 | ||
127 | /* Get PMD offset. */ | |
128 | srlx %g4, 23, %g6 | |
129 | and %g6, 0x7ff, %g6 | |
130 | sllx %g6, 2, %g6 | |
131 | ||
132 | /* Load PMD, is it valid? */ | |
133 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
134 | brz,pn %g5, longpath | |
135 | sllx %g5, 11, %g5 | |
136 | ||
137 | /* Get PTE offset. */ | |
138 | srlx %g4, 13, %g6 | |
139 | and %g6, 0x3ff, %g6 | |
140 | sllx %g6, 3, %g6 | |
141 | ||
142 | /* Load PTE. */ | |
143 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
144 | brgez,pn %g5, longpath | |
145 | nop | |
146 | ||
147 | /* TLB load and return from trap. */ | |
148 | stxa %g5, [%g0] ASI_DTLB_DATA_IN | |
149 | retry | |
150 | ||
151 | /* | |
152 | * On a first level data miss, check whether this is to the OBP range (note | |
153 | * that such accesses can be made by prom, as well as by kernel using | |
154 | * prom_getproperty on "address"), and if so, do not use vpte access ... | |
155 | * rather, use information saved during inherit_prom_mappings() using 8k | |
156 | * pagesize. | |
157 | */ | |
158 | kvmap: | |
159 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
160 | mov 0xf, %g5 | |
161 | sllx %g5, 28, %g5 | |
162 | ||
163 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
164 | cmp %g4, %g5 | |
165 | blu,pn %xcc, vmalloc_addr | |
166 | mov 0x1, %g5 | |
167 | ||
168 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
169 | sllx %g5, 32, %g5 | |
170 | ||
171 | /* Is addr < HI_OBP_ADDRESS? */ | |
172 | cmp %g4, %g5 | |
173 | blu,pn %xcc, obp_daddr_patch | |
174 | nop | |
175 | ||
176 | vmalloc_addr: | |
177 | /* If we get here, a vmalloc addr accessed, load kernel VPTE. */ | |
178 | ldxa [%g3 + %g6] ASI_N, %g5 | |
179 | brgez,pn %g5, longpath | |
180 | nop | |
181 | ||
182 | /* PTE is valid, load into TLB and return from trap. */ | |
183 | stxa %g5, [%g0] ASI_DTLB_DATA_IN ! Reload TLB | |
184 | retry | |
185 | ||
186 | /* This is trivial with the new code... */ | |
187 | .globl do_fpdis | |
188 | do_fpdis: | |
189 | sethi %hi(TSTATE_PEF), %g4 ! IEU0 | |
190 | rdpr %tstate, %g5 | |
191 | andcc %g5, %g4, %g0 | |
192 | be,pt %xcc, 1f | |
193 | nop | |
194 | rd %fprs, %g5 | |
195 | andcc %g5, FPRS_FEF, %g0 | |
196 | be,pt %xcc, 1f | |
197 | nop | |
198 | ||
199 | /* Legal state when DCR_IFPOE is set in Cheetah %dcr. */ | |
200 | sethi %hi(109f), %g7 | |
201 | ba,pt %xcc, etrap | |
202 | 109: or %g7, %lo(109b), %g7 | |
203 | add %g0, %g0, %g0 | |
204 | ba,a,pt %xcc, rtrap_clr_l6 | |
205 | ||
206 | 1: ldub [%g6 + TI_FPSAVED], %g5 ! Load Group | |
207 | wr %g0, FPRS_FEF, %fprs ! LSU Group+4bubbles | |
208 | andcc %g5, FPRS_FEF, %g0 ! IEU1 Group | |
209 | be,a,pt %icc, 1f ! CTI | |
210 | clr %g7 ! IEU0 | |
211 | ldx [%g6 + TI_GSR], %g7 ! Load Group | |
212 | 1: andcc %g5, FPRS_DL, %g0 ! IEU1 | |
213 | bne,pn %icc, 2f ! CTI | |
214 | fzero %f0 ! FPA | |
215 | andcc %g5, FPRS_DU, %g0 ! IEU1 Group | |
216 | bne,pn %icc, 1f ! CTI | |
217 | fzero %f2 ! FPA | |
218 | faddd %f0, %f2, %f4 | |
219 | fmuld %f0, %f2, %f6 | |
220 | faddd %f0, %f2, %f8 | |
221 | fmuld %f0, %f2, %f10 | |
222 | faddd %f0, %f2, %f12 | |
223 | fmuld %f0, %f2, %f14 | |
224 | faddd %f0, %f2, %f16 | |
225 | fmuld %f0, %f2, %f18 | |
226 | faddd %f0, %f2, %f20 | |
227 | fmuld %f0, %f2, %f22 | |
228 | faddd %f0, %f2, %f24 | |
229 | fmuld %f0, %f2, %f26 | |
230 | faddd %f0, %f2, %f28 | |
231 | fmuld %f0, %f2, %f30 | |
232 | faddd %f0, %f2, %f32 | |
233 | fmuld %f0, %f2, %f34 | |
234 | faddd %f0, %f2, %f36 | |
235 | fmuld %f0, %f2, %f38 | |
236 | faddd %f0, %f2, %f40 | |
237 | fmuld %f0, %f2, %f42 | |
238 | faddd %f0, %f2, %f44 | |
239 | fmuld %f0, %f2, %f46 | |
240 | faddd %f0, %f2, %f48 | |
241 | fmuld %f0, %f2, %f50 | |
242 | faddd %f0, %f2, %f52 | |
243 | fmuld %f0, %f2, %f54 | |
244 | faddd %f0, %f2, %f56 | |
245 | fmuld %f0, %f2, %f58 | |
246 | b,pt %xcc, fpdis_exit2 | |
247 | faddd %f0, %f2, %f60 | |
248 | 1: mov SECONDARY_CONTEXT, %g3 | |
249 | add %g6, TI_FPREGS + 0x80, %g1 | |
250 | faddd %f0, %f2, %f4 | |
251 | fmuld %f0, %f2, %f6 | |
252 | ldxa [%g3] ASI_DMMU, %g5 | |
253 | cplus_fptrap_insn_1: | |
254 | sethi %hi(0), %g2 | |
255 | stxa %g2, [%g3] ASI_DMMU | |
256 | membar #Sync | |
257 | add %g6, TI_FPREGS + 0xc0, %g2 | |
258 | faddd %f0, %f2, %f8 | |
259 | fmuld %f0, %f2, %f10 | |
260 | ldda [%g1] ASI_BLK_S, %f32 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
261 | ldda [%g2] ASI_BLK_S, %f48 | |
262 | faddd %f0, %f2, %f12 | |
263 | fmuld %f0, %f2, %f14 | |
264 | faddd %f0, %f2, %f16 | |
265 | fmuld %f0, %f2, %f18 | |
266 | faddd %f0, %f2, %f20 | |
267 | fmuld %f0, %f2, %f22 | |
268 | faddd %f0, %f2, %f24 | |
269 | fmuld %f0, %f2, %f26 | |
270 | faddd %f0, %f2, %f28 | |
271 | fmuld %f0, %f2, %f30 | |
b445e26c | 272 | membar #Sync |
1da177e4 | 273 | b,pt %xcc, fpdis_exit |
b445e26c | 274 | nop |
1da177e4 LT |
275 | 2: andcc %g5, FPRS_DU, %g0 |
276 | bne,pt %icc, 3f | |
277 | fzero %f32 | |
278 | mov SECONDARY_CONTEXT, %g3 | |
279 | fzero %f34 | |
280 | ldxa [%g3] ASI_DMMU, %g5 | |
281 | add %g6, TI_FPREGS, %g1 | |
282 | cplus_fptrap_insn_2: | |
283 | sethi %hi(0), %g2 | |
284 | stxa %g2, [%g3] ASI_DMMU | |
285 | membar #Sync | |
286 | add %g6, TI_FPREGS + 0x40, %g2 | |
287 | faddd %f32, %f34, %f36 | |
288 | fmuld %f32, %f34, %f38 | |
289 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
290 | ldda [%g2] ASI_BLK_S, %f16 | |
291 | faddd %f32, %f34, %f40 | |
292 | fmuld %f32, %f34, %f42 | |
293 | faddd %f32, %f34, %f44 | |
294 | fmuld %f32, %f34, %f46 | |
295 | faddd %f32, %f34, %f48 | |
296 | fmuld %f32, %f34, %f50 | |
297 | faddd %f32, %f34, %f52 | |
298 | fmuld %f32, %f34, %f54 | |
299 | faddd %f32, %f34, %f56 | |
300 | fmuld %f32, %f34, %f58 | |
301 | faddd %f32, %f34, %f60 | |
302 | fmuld %f32, %f34, %f62 | |
b445e26c | 303 | membar #Sync |
1da177e4 | 304 | ba,pt %xcc, fpdis_exit |
b445e26c | 305 | nop |
1da177e4 LT |
306 | 3: mov SECONDARY_CONTEXT, %g3 |
307 | add %g6, TI_FPREGS, %g1 | |
308 | ldxa [%g3] ASI_DMMU, %g5 | |
309 | cplus_fptrap_insn_3: | |
310 | sethi %hi(0), %g2 | |
311 | stxa %g2, [%g3] ASI_DMMU | |
312 | membar #Sync | |
313 | mov 0x40, %g2 | |
314 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
315 | ldda [%g1 + %g2] ASI_BLK_S, %f16 | |
316 | add %g1, 0x80, %g1 | |
317 | ldda [%g1] ASI_BLK_S, %f32 | |
318 | ldda [%g1 + %g2] ASI_BLK_S, %f48 | |
319 | membar #Sync | |
320 | fpdis_exit: | |
321 | stxa %g5, [%g3] ASI_DMMU | |
322 | membar #Sync | |
323 | fpdis_exit2: | |
324 | wr %g7, 0, %gsr | |
325 | ldx [%g6 + TI_XFSR], %fsr | |
326 | rdpr %tstate, %g3 | |
327 | or %g3, %g4, %g3 ! anal... | |
328 | wrpr %g3, %tstate | |
329 | wr %g0, FPRS_FEF, %fprs ! clean DU/DL bits | |
330 | retry | |
331 | ||
332 | .align 32 | |
333 | fp_other_bounce: | |
334 | call do_fpother | |
335 | add %sp, PTREGS_OFF, %o0 | |
336 | ba,pt %xcc, rtrap | |
337 | clr %l6 | |
338 | ||
339 | .globl do_fpother_check_fitos | |
340 | .align 32 | |
341 | do_fpother_check_fitos: | |
342 | sethi %hi(fp_other_bounce - 4), %g7 | |
343 | or %g7, %lo(fp_other_bounce - 4), %g7 | |
344 | ||
345 | /* NOTE: Need to preserve %g7 until we fully commit | |
346 | * to the fitos fixup. | |
347 | */ | |
348 | stx %fsr, [%g6 + TI_XFSR] | |
349 | rdpr %tstate, %g3 | |
350 | andcc %g3, TSTATE_PRIV, %g0 | |
351 | bne,pn %xcc, do_fptrap_after_fsr | |
352 | nop | |
353 | ldx [%g6 + TI_XFSR], %g3 | |
354 | srlx %g3, 14, %g1 | |
355 | and %g1, 7, %g1 | |
356 | cmp %g1, 2 ! Unfinished FP-OP | |
357 | bne,pn %xcc, do_fptrap_after_fsr | |
358 | sethi %hi(1 << 23), %g1 ! Inexact | |
359 | andcc %g3, %g1, %g0 | |
360 | bne,pn %xcc, do_fptrap_after_fsr | |
361 | rdpr %tpc, %g1 | |
362 | lduwa [%g1] ASI_AIUP, %g3 ! This cannot ever fail | |
363 | #define FITOS_MASK 0xc1f83fe0 | |
364 | #define FITOS_COMPARE 0x81a01880 | |
365 | sethi %hi(FITOS_MASK), %g1 | |
366 | or %g1, %lo(FITOS_MASK), %g1 | |
367 | and %g3, %g1, %g1 | |
368 | sethi %hi(FITOS_COMPARE), %g2 | |
369 | or %g2, %lo(FITOS_COMPARE), %g2 | |
370 | cmp %g1, %g2 | |
371 | bne,pn %xcc, do_fptrap_after_fsr | |
372 | nop | |
373 | std %f62, [%g6 + TI_FPREGS + (62 * 4)] | |
374 | sethi %hi(fitos_table_1), %g1 | |
375 | and %g3, 0x1f, %g2 | |
376 | or %g1, %lo(fitos_table_1), %g1 | |
377 | sllx %g2, 2, %g2 | |
378 | jmpl %g1 + %g2, %g0 | |
379 | ba,pt %xcc, fitos_emul_continue | |
380 | ||
381 | fitos_table_1: | |
382 | fitod %f0, %f62 | |
383 | fitod %f1, %f62 | |
384 | fitod %f2, %f62 | |
385 | fitod %f3, %f62 | |
386 | fitod %f4, %f62 | |
387 | fitod %f5, %f62 | |
388 | fitod %f6, %f62 | |
389 | fitod %f7, %f62 | |
390 | fitod %f8, %f62 | |
391 | fitod %f9, %f62 | |
392 | fitod %f10, %f62 | |
393 | fitod %f11, %f62 | |
394 | fitod %f12, %f62 | |
395 | fitod %f13, %f62 | |
396 | fitod %f14, %f62 | |
397 | fitod %f15, %f62 | |
398 | fitod %f16, %f62 | |
399 | fitod %f17, %f62 | |
400 | fitod %f18, %f62 | |
401 | fitod %f19, %f62 | |
402 | fitod %f20, %f62 | |
403 | fitod %f21, %f62 | |
404 | fitod %f22, %f62 | |
405 | fitod %f23, %f62 | |
406 | fitod %f24, %f62 | |
407 | fitod %f25, %f62 | |
408 | fitod %f26, %f62 | |
409 | fitod %f27, %f62 | |
410 | fitod %f28, %f62 | |
411 | fitod %f29, %f62 | |
412 | fitod %f30, %f62 | |
413 | fitod %f31, %f62 | |
414 | ||
415 | fitos_emul_continue: | |
416 | sethi %hi(fitos_table_2), %g1 | |
417 | srl %g3, 25, %g2 | |
418 | or %g1, %lo(fitos_table_2), %g1 | |
419 | and %g2, 0x1f, %g2 | |
420 | sllx %g2, 2, %g2 | |
421 | jmpl %g1 + %g2, %g0 | |
422 | ba,pt %xcc, fitos_emul_fini | |
423 | ||
424 | fitos_table_2: | |
425 | fdtos %f62, %f0 | |
426 | fdtos %f62, %f1 | |
427 | fdtos %f62, %f2 | |
428 | fdtos %f62, %f3 | |
429 | fdtos %f62, %f4 | |
430 | fdtos %f62, %f5 | |
431 | fdtos %f62, %f6 | |
432 | fdtos %f62, %f7 | |
433 | fdtos %f62, %f8 | |
434 | fdtos %f62, %f9 | |
435 | fdtos %f62, %f10 | |
436 | fdtos %f62, %f11 | |
437 | fdtos %f62, %f12 | |
438 | fdtos %f62, %f13 | |
439 | fdtos %f62, %f14 | |
440 | fdtos %f62, %f15 | |
441 | fdtos %f62, %f16 | |
442 | fdtos %f62, %f17 | |
443 | fdtos %f62, %f18 | |
444 | fdtos %f62, %f19 | |
445 | fdtos %f62, %f20 | |
446 | fdtos %f62, %f21 | |
447 | fdtos %f62, %f22 | |
448 | fdtos %f62, %f23 | |
449 | fdtos %f62, %f24 | |
450 | fdtos %f62, %f25 | |
451 | fdtos %f62, %f26 | |
452 | fdtos %f62, %f27 | |
453 | fdtos %f62, %f28 | |
454 | fdtos %f62, %f29 | |
455 | fdtos %f62, %f30 | |
456 | fdtos %f62, %f31 | |
457 | ||
458 | fitos_emul_fini: | |
459 | ldd [%g6 + TI_FPREGS + (62 * 4)], %f62 | |
460 | done | |
461 | ||
462 | .globl do_fptrap | |
463 | .align 32 | |
464 | do_fptrap: | |
465 | stx %fsr, [%g6 + TI_XFSR] | |
466 | do_fptrap_after_fsr: | |
467 | ldub [%g6 + TI_FPSAVED], %g3 | |
468 | rd %fprs, %g1 | |
469 | or %g3, %g1, %g3 | |
470 | stb %g3, [%g6 + TI_FPSAVED] | |
471 | rd %gsr, %g3 | |
472 | stx %g3, [%g6 + TI_GSR] | |
473 | mov SECONDARY_CONTEXT, %g3 | |
474 | ldxa [%g3] ASI_DMMU, %g5 | |
475 | cplus_fptrap_insn_4: | |
476 | sethi %hi(0), %g2 | |
477 | stxa %g2, [%g3] ASI_DMMU | |
478 | membar #Sync | |
479 | add %g6, TI_FPREGS, %g2 | |
480 | andcc %g1, FPRS_DL, %g0 | |
481 | be,pn %icc, 4f | |
482 | mov 0x40, %g3 | |
483 | stda %f0, [%g2] ASI_BLK_S | |
484 | stda %f16, [%g2 + %g3] ASI_BLK_S | |
485 | andcc %g1, FPRS_DU, %g0 | |
486 | be,pn %icc, 5f | |
487 | 4: add %g2, 128, %g2 | |
488 | stda %f32, [%g2] ASI_BLK_S | |
489 | stda %f48, [%g2 + %g3] ASI_BLK_S | |
490 | 5: mov SECONDARY_CONTEXT, %g1 | |
491 | membar #Sync | |
492 | stxa %g5, [%g1] ASI_DMMU | |
493 | membar #Sync | |
494 | ba,pt %xcc, etrap | |
495 | wr %g0, 0, %fprs | |
496 | ||
497 | cplus_fptrap_1: | |
498 | sethi %hi(CTX_CHEETAH_PLUS_CTX0), %g2 | |
499 | ||
500 | .globl cheetah_plus_patch_fpdis | |
501 | cheetah_plus_patch_fpdis: | |
502 | /* We configure the dTLB512_0 for 4MB pages and the | |
503 | * dTLB512_1 for 8K pages when in context zero. | |
504 | */ | |
505 | sethi %hi(cplus_fptrap_1), %o0 | |
506 | lduw [%o0 + %lo(cplus_fptrap_1)], %o1 | |
507 | ||
508 | set cplus_fptrap_insn_1, %o2 | |
509 | stw %o1, [%o2] | |
510 | flush %o2 | |
511 | set cplus_fptrap_insn_2, %o2 | |
512 | stw %o1, [%o2] | |
513 | flush %o2 | |
514 | set cplus_fptrap_insn_3, %o2 | |
515 | stw %o1, [%o2] | |
516 | flush %o2 | |
517 | set cplus_fptrap_insn_4, %o2 | |
518 | stw %o1, [%o2] | |
519 | flush %o2 | |
520 | ||
521 | retl | |
522 | nop | |
523 | ||
524 | /* The registers for cross calls will be: | |
525 | * | |
526 | * DATA 0: [low 32-bits] Address of function to call, jmp to this | |
527 | * [high 32-bits] MMU Context Argument 0, place in %g5 | |
528 | * DATA 1: Address Argument 1, place in %g6 | |
529 | * DATA 2: Address Argument 2, place in %g7 | |
530 | * | |
531 | * With this method we can do most of the cross-call tlb/cache | |
532 | * flushing very quickly. | |
533 | * | |
534 | * Current CPU's IRQ worklist table is locked into %g1, | |
535 | * don't touch. | |
536 | */ | |
537 | .text | |
538 | .align 32 | |
539 | .globl do_ivec | |
540 | do_ivec: | |
541 | mov 0x40, %g3 | |
542 | ldxa [%g3 + %g0] ASI_INTR_R, %g3 | |
543 | sethi %hi(KERNBASE), %g4 | |
544 | cmp %g3, %g4 | |
545 | bgeu,pn %xcc, do_ivec_xcall | |
546 | srlx %g3, 32, %g5 | |
547 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
548 | membar #Sync | |
549 | ||
550 | sethi %hi(ivector_table), %g2 | |
551 | sllx %g3, 5, %g3 | |
552 | or %g2, %lo(ivector_table), %g2 | |
553 | add %g2, %g3, %g3 | |
1da177e4 | 554 | ldub [%g3 + 0x04], %g4 /* pil */ |
088dd1f8 | 555 | mov 1, %g2 |
1da177e4 LT |
556 | sllx %g2, %g4, %g2 |
557 | sllx %g4, 2, %g4 | |
088dd1f8 | 558 | |
1da177e4 LT |
559 | lduw [%g6 + %g4], %g5 /* g5 = irq_work(cpu, pil) */ |
560 | stw %g5, [%g3 + 0x00] /* bucket->irq_chain = g5 */ | |
561 | stw %g3, [%g6 + %g4] /* irq_work(cpu, pil) = bucket */ | |
562 | wr %g2, 0x0, %set_softint | |
563 | retry | |
564 | do_ivec_xcall: | |
565 | mov 0x50, %g1 | |
1da177e4 LT |
566 | ldxa [%g1 + %g0] ASI_INTR_R, %g1 |
567 | srl %g3, 0, %g3 | |
088dd1f8 | 568 | |
1da177e4 LT |
569 | mov 0x60, %g7 |
570 | ldxa [%g7 + %g0] ASI_INTR_R, %g7 | |
571 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
572 | membar #Sync | |
573 | ba,pt %xcc, 1f | |
574 | nop | |
575 | ||
576 | .align 32 | |
577 | 1: jmpl %g3, %g0 | |
578 | nop | |
579 | ||
1da177e4 LT |
580 | .globl save_alternate_globals |
581 | save_alternate_globals: /* %o0 = save_area */ | |
582 | rdpr %pstate, %o5 | |
583 | andn %o5, PSTATE_IE, %o1 | |
584 | wrpr %o1, PSTATE_AG, %pstate | |
585 | stx %g0, [%o0 + 0x00] | |
586 | stx %g1, [%o0 + 0x08] | |
587 | stx %g2, [%o0 + 0x10] | |
588 | stx %g3, [%o0 + 0x18] | |
589 | stx %g4, [%o0 + 0x20] | |
590 | stx %g5, [%o0 + 0x28] | |
591 | stx %g6, [%o0 + 0x30] | |
592 | stx %g7, [%o0 + 0x38] | |
593 | wrpr %o1, PSTATE_IG, %pstate | |
594 | stx %g0, [%o0 + 0x40] | |
595 | stx %g1, [%o0 + 0x48] | |
596 | stx %g2, [%o0 + 0x50] | |
597 | stx %g3, [%o0 + 0x58] | |
598 | stx %g4, [%o0 + 0x60] | |
599 | stx %g5, [%o0 + 0x68] | |
600 | stx %g6, [%o0 + 0x70] | |
601 | stx %g7, [%o0 + 0x78] | |
602 | wrpr %o1, PSTATE_MG, %pstate | |
603 | stx %g0, [%o0 + 0x80] | |
604 | stx %g1, [%o0 + 0x88] | |
605 | stx %g2, [%o0 + 0x90] | |
606 | stx %g3, [%o0 + 0x98] | |
607 | stx %g4, [%o0 + 0xa0] | |
608 | stx %g5, [%o0 + 0xa8] | |
609 | stx %g6, [%o0 + 0xb0] | |
610 | stx %g7, [%o0 + 0xb8] | |
611 | wrpr %o5, 0x0, %pstate | |
612 | retl | |
613 | nop | |
614 | ||
615 | .globl restore_alternate_globals | |
616 | restore_alternate_globals: /* %o0 = save_area */ | |
617 | rdpr %pstate, %o5 | |
618 | andn %o5, PSTATE_IE, %o1 | |
619 | wrpr %o1, PSTATE_AG, %pstate | |
620 | ldx [%o0 + 0x00], %g0 | |
621 | ldx [%o0 + 0x08], %g1 | |
622 | ldx [%o0 + 0x10], %g2 | |
623 | ldx [%o0 + 0x18], %g3 | |
624 | ldx [%o0 + 0x20], %g4 | |
625 | ldx [%o0 + 0x28], %g5 | |
626 | ldx [%o0 + 0x30], %g6 | |
627 | ldx [%o0 + 0x38], %g7 | |
628 | wrpr %o1, PSTATE_IG, %pstate | |
629 | ldx [%o0 + 0x40], %g0 | |
630 | ldx [%o0 + 0x48], %g1 | |
631 | ldx [%o0 + 0x50], %g2 | |
632 | ldx [%o0 + 0x58], %g3 | |
633 | ldx [%o0 + 0x60], %g4 | |
634 | ldx [%o0 + 0x68], %g5 | |
635 | ldx [%o0 + 0x70], %g6 | |
636 | ldx [%o0 + 0x78], %g7 | |
637 | wrpr %o1, PSTATE_MG, %pstate | |
638 | ldx [%o0 + 0x80], %g0 | |
639 | ldx [%o0 + 0x88], %g1 | |
640 | ldx [%o0 + 0x90], %g2 | |
641 | ldx [%o0 + 0x98], %g3 | |
642 | ldx [%o0 + 0xa0], %g4 | |
643 | ldx [%o0 + 0xa8], %g5 | |
644 | ldx [%o0 + 0xb0], %g6 | |
645 | ldx [%o0 + 0xb8], %g7 | |
646 | wrpr %o5, 0x0, %pstate | |
647 | retl | |
648 | nop | |
649 | ||
650 | .globl getcc, setcc | |
651 | getcc: | |
652 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
653 | srlx %o1, 32, %o1 | |
654 | and %o1, 0xf, %o1 | |
655 | retl | |
656 | stx %o1, [%o0 + PT_V9_G1] | |
657 | setcc: | |
658 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
659 | ldx [%o0 + PT_V9_G1], %o2 | |
660 | or %g0, %ulo(TSTATE_ICC), %o3 | |
661 | sllx %o3, 32, %o3 | |
662 | andn %o1, %o3, %o1 | |
663 | sllx %o2, 32, %o2 | |
664 | and %o2, %o3, %o2 | |
665 | or %o1, %o2, %o1 | |
666 | retl | |
667 | stx %o1, [%o0 + PT_V9_TSTATE] | |
668 | ||
669 | .globl utrap, utrap_ill | |
670 | utrap: brz,pn %g1, etrap | |
671 | nop | |
672 | save %sp, -128, %sp | |
673 | rdpr %tstate, %l6 | |
674 | rdpr %cwp, %l7 | |
675 | andn %l6, TSTATE_CWP, %l6 | |
676 | wrpr %l6, %l7, %tstate | |
677 | rdpr %tpc, %l6 | |
678 | rdpr %tnpc, %l7 | |
679 | wrpr %g1, 0, %tnpc | |
680 | done | |
681 | utrap_ill: | |
682 | call bad_trap | |
683 | add %sp, PTREGS_OFF, %o0 | |
684 | ba,pt %xcc, rtrap | |
685 | clr %l6 | |
686 | ||
1da177e4 LT |
687 | /* XXX Here is stuff we still need to write... -DaveM XXX */ |
688 | .globl netbsd_syscall | |
689 | netbsd_syscall: | |
690 | retl | |
691 | nop | |
692 | ||
693 | /* These next few routines must be sure to clear the | |
694 | * SFSR FaultValid bit so that the fast tlb data protection | |
695 | * handler does not flush the wrong context and lock up the | |
696 | * box. | |
697 | */ | |
698 | .globl __do_data_access_exception | |
699 | .globl __do_data_access_exception_tl1 | |
700 | __do_data_access_exception_tl1: | |
701 | rdpr %pstate, %g4 | |
702 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
703 | mov TLB_SFSR, %g3 | |
704 | mov DMMU_SFAR, %g5 | |
705 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
706 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
707 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
708 | membar #Sync | |
709 | ba,pt %xcc, winfix_dax | |
710 | rdpr %tpc, %g3 | |
711 | __do_data_access_exception: | |
712 | rdpr %pstate, %g4 | |
713 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
714 | mov TLB_SFSR, %g3 | |
715 | mov DMMU_SFAR, %g5 | |
716 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
717 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
718 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
719 | membar #Sync | |
720 | sethi %hi(109f), %g7 | |
721 | ba,pt %xcc, etrap | |
722 | 109: or %g7, %lo(109b), %g7 | |
723 | mov %l4, %o1 | |
724 | mov %l5, %o2 | |
725 | call data_access_exception | |
726 | add %sp, PTREGS_OFF, %o0 | |
727 | ba,pt %xcc, rtrap | |
728 | clr %l6 | |
729 | ||
730 | .globl __do_instruction_access_exception | |
731 | .globl __do_instruction_access_exception_tl1 | |
732 | __do_instruction_access_exception_tl1: | |
733 | rdpr %pstate, %g4 | |
734 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
735 | mov TLB_SFSR, %g3 | |
736 | mov DMMU_SFAR, %g5 | |
737 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
738 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
739 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit | |
740 | membar #Sync | |
741 | sethi %hi(109f), %g7 | |
742 | ba,pt %xcc, etraptl1 | |
743 | 109: or %g7, %lo(109b), %g7 | |
744 | mov %l4, %o1 | |
745 | mov %l5, %o2 | |
746 | call instruction_access_exception_tl1 | |
747 | add %sp, PTREGS_OFF, %o0 | |
748 | ba,pt %xcc, rtrap | |
749 | clr %l6 | |
750 | ||
751 | __do_instruction_access_exception: | |
752 | rdpr %pstate, %g4 | |
753 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
754 | mov TLB_SFSR, %g3 | |
755 | mov DMMU_SFAR, %g5 | |
756 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
757 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
758 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit | |
759 | membar #Sync | |
760 | sethi %hi(109f), %g7 | |
761 | ba,pt %xcc, etrap | |
762 | 109: or %g7, %lo(109b), %g7 | |
763 | mov %l4, %o1 | |
764 | mov %l5, %o2 | |
765 | call instruction_access_exception | |
766 | add %sp, PTREGS_OFF, %o0 | |
767 | ba,pt %xcc, rtrap | |
768 | clr %l6 | |
769 | ||
770 | /* This is the trap handler entry point for ECC correctable | |
771 | * errors. They are corrected, but we listen for the trap | |
772 | * so that the event can be logged. | |
773 | * | |
774 | * Disrupting errors are either: | |
775 | * 1) single-bit ECC errors during UDB reads to system | |
776 | * memory | |
777 | * 2) data parity errors during write-back events | |
778 | * | |
779 | * As far as I can make out from the manual, the CEE trap | |
780 | * is only for correctable errors during memory read | |
781 | * accesses by the front-end of the processor. | |
782 | * | |
783 | * The code below is only for trap level 1 CEE events, | |
784 | * as it is the only situation where we can safely record | |
785 | * and log. For trap level >1 we just clear the CE bit | |
786 | * in the AFSR and return. | |
787 | */ | |
788 | ||
789 | /* Our trap handling infrastructure allows us to preserve | |
790 | * two 64-bit values during etrap for arguments to | |
791 | * subsequent C code. Therefore we encode the information | |
792 | * as follows: | |
793 | * | |
794 | * value 1) Full 64-bits of AFAR | |
795 | * value 2) Low 33-bits of AFSR, then bits 33-->42 | |
796 | * are UDBL error status and bits 43-->52 | |
797 | * are UDBH error status | |
798 | */ | |
799 | .align 64 | |
800 | .globl cee_trap | |
801 | cee_trap: | |
802 | ldxa [%g0] ASI_AFSR, %g1 ! Read AFSR | |
803 | ldxa [%g0] ASI_AFAR, %g2 ! Read AFAR | |
804 | sllx %g1, 31, %g1 ! Clear reserved bits | |
805 | srlx %g1, 31, %g1 ! in AFSR | |
806 | ||
807 | /* NOTE: UltraSparc-I/II have high and low UDB error | |
808 | * registers, corresponding to the two UDB units | |
809 | * present on those chips. UltraSparc-IIi only | |
810 | * has a single UDB, called "SDB" in the manual. | |
811 | * For IIi the upper UDB register always reads | |
812 | * as zero so for our purposes things will just | |
813 | * work with the checks below. | |
814 | */ | |
815 | ldxa [%g0] ASI_UDBL_ERROR_R, %g3 ! Read UDB-Low error status | |
816 | andcc %g3, (1 << 8), %g4 ! Check CE bit | |
817 | sllx %g3, (64 - 10), %g3 ! Clear reserved bits | |
818 | srlx %g3, (64 - 10), %g3 ! in UDB-Low error status | |
819 | ||
820 | sllx %g3, (33 + 0), %g3 ! Shift up to encoding area | |
821 | or %g1, %g3, %g1 ! Or it in | |
822 | be,pn %xcc, 1f ! Branch if CE bit was clear | |
823 | nop | |
824 | stxa %g4, [%g0] ASI_UDB_ERROR_W ! Clear CE sticky bit in UDBL | |
825 | membar #Sync ! Synchronize ASI stores | |
826 | 1: mov 0x18, %g5 ! Addr of UDB-High error status | |
827 | ldxa [%g5] ASI_UDBH_ERROR_R, %g3 ! Read it | |
828 | ||
829 | andcc %g3, (1 << 8), %g4 ! Check CE bit | |
830 | sllx %g3, (64 - 10), %g3 ! Clear reserved bits | |
831 | srlx %g3, (64 - 10), %g3 ! in UDB-High error status | |
832 | sllx %g3, (33 + 10), %g3 ! Shift up to encoding area | |
833 | or %g1, %g3, %g1 ! Or it in | |
834 | be,pn %xcc, 1f ! Branch if CE bit was clear | |
835 | nop | |
836 | nop | |
837 | ||
838 | stxa %g4, [%g5] ASI_UDB_ERROR_W ! Clear CE sticky bit in UDBH | |
839 | membar #Sync ! Synchronize ASI stores | |
840 | 1: mov 1, %g5 ! AFSR CE bit is | |
841 | sllx %g5, 20, %g5 ! bit 20 | |
842 | stxa %g5, [%g0] ASI_AFSR ! Clear CE sticky bit in AFSR | |
843 | membar #Sync ! Synchronize ASI stores | |
844 | sllx %g2, (64 - 41), %g2 ! Clear reserved bits | |
845 | srlx %g2, (64 - 41), %g2 ! in latched AFAR | |
846 | ||
847 | andn %g2, 0x0f, %g2 ! Finish resv bit clearing | |
848 | mov %g1, %g4 ! Move AFSR+UDB* into save reg | |
849 | mov %g2, %g5 ! Move AFAR into save reg | |
850 | rdpr %pil, %g2 | |
851 | wrpr %g0, 15, %pil | |
852 | ba,pt %xcc, etrap_irq | |
853 | rd %pc, %g7 | |
854 | mov %l4, %o0 | |
855 | ||
856 | mov %l5, %o1 | |
857 | call cee_log | |
858 | add %sp, PTREGS_OFF, %o2 | |
859 | ba,a,pt %xcc, rtrap_irq | |
860 | ||
861 | /* Capture I/D/E-cache state into per-cpu error scoreboard. | |
862 | * | |
863 | * %g1: (TL>=0) ? 1 : 0 | |
864 | * %g2: scratch | |
865 | * %g3: scratch | |
866 | * %g4: AFSR | |
867 | * %g5: AFAR | |
868 | * %g6: current thread ptr | |
869 | * %g7: scratch | |
870 | */ | |
871 | #define CHEETAH_LOG_ERROR \ | |
872 | /* Put "TL1" software bit into AFSR. */ \ | |
873 | and %g1, 0x1, %g1; \ | |
874 | sllx %g1, 63, %g2; \ | |
875 | or %g4, %g2, %g4; \ | |
876 | /* Get log entry pointer for this cpu at this trap level. */ \ | |
877 | BRANCH_IF_JALAPENO(g2,g3,50f) \ | |
878 | ldxa [%g0] ASI_SAFARI_CONFIG, %g2; \ | |
879 | srlx %g2, 17, %g2; \ | |
880 | ba,pt %xcc, 60f; \ | |
881 | and %g2, 0x3ff, %g2; \ | |
882 | 50: ldxa [%g0] ASI_JBUS_CONFIG, %g2; \ | |
883 | srlx %g2, 17, %g2; \ | |
884 | and %g2, 0x1f, %g2; \ | |
885 | 60: sllx %g2, 9, %g2; \ | |
886 | sethi %hi(cheetah_error_log), %g3; \ | |
887 | ldx [%g3 + %lo(cheetah_error_log)], %g3; \ | |
888 | brz,pn %g3, 80f; \ | |
889 | nop; \ | |
890 | add %g3, %g2, %g3; \ | |
891 | sllx %g1, 8, %g1; \ | |
892 | add %g3, %g1, %g1; \ | |
893 | /* %g1 holds pointer to the top of the logging scoreboard */ \ | |
894 | ldx [%g1 + 0x0], %g7; \ | |
895 | cmp %g7, -1; \ | |
896 | bne,pn %xcc, 80f; \ | |
897 | nop; \ | |
898 | stx %g4, [%g1 + 0x0]; \ | |
899 | stx %g5, [%g1 + 0x8]; \ | |
900 | add %g1, 0x10, %g1; \ | |
901 | /* %g1 now points to D-cache logging area */ \ | |
902 | set 0x3ff8, %g2; /* DC_addr mask */ \ | |
903 | and %g5, %g2, %g2; /* DC_addr bits of AFAR */ \ | |
904 | srlx %g5, 12, %g3; \ | |
905 | or %g3, 1, %g3; /* PHYS tag + valid */ \ | |
906 | 10: ldxa [%g2] ASI_DCACHE_TAG, %g7; \ | |
907 | cmp %g3, %g7; /* TAG match? */ \ | |
908 | bne,pt %xcc, 13f; \ | |
909 | nop; \ | |
910 | /* Yep, what we want, capture state. */ \ | |
911 | stx %g2, [%g1 + 0x20]; \ | |
912 | stx %g7, [%g1 + 0x28]; \ | |
913 | /* A membar Sync is required before and after utag access. */ \ | |
914 | membar #Sync; \ | |
915 | ldxa [%g2] ASI_DCACHE_UTAG, %g7; \ | |
916 | membar #Sync; \ | |
917 | stx %g7, [%g1 + 0x30]; \ | |
918 | ldxa [%g2] ASI_DCACHE_SNOOP_TAG, %g7; \ | |
919 | stx %g7, [%g1 + 0x38]; \ | |
920 | clr %g3; \ | |
921 | 12: ldxa [%g2 + %g3] ASI_DCACHE_DATA, %g7; \ | |
922 | stx %g7, [%g1]; \ | |
923 | add %g3, (1 << 5), %g3; \ | |
924 | cmp %g3, (4 << 5); \ | |
925 | bl,pt %xcc, 12b; \ | |
926 | add %g1, 0x8, %g1; \ | |
927 | ba,pt %xcc, 20f; \ | |
928 | add %g1, 0x20, %g1; \ | |
929 | 13: sethi %hi(1 << 14), %g7; \ | |
930 | add %g2, %g7, %g2; \ | |
931 | srlx %g2, 14, %g7; \ | |
932 | cmp %g7, 4; \ | |
933 | bl,pt %xcc, 10b; \ | |
934 | nop; \ | |
935 | add %g1, 0x40, %g1; \ | |
936 | 20: /* %g1 now points to I-cache logging area */ \ | |
937 | set 0x1fe0, %g2; /* IC_addr mask */ \ | |
938 | and %g5, %g2, %g2; /* IC_addr bits of AFAR */ \ | |
939 | sllx %g2, 1, %g2; /* IC_addr[13:6]==VA[12:5] */ \ | |
940 | srlx %g5, (13 - 8), %g3; /* Make PTAG */ \ | |
941 | andn %g3, 0xff, %g3; /* Mask off undefined bits */ \ | |
942 | 21: ldxa [%g2] ASI_IC_TAG, %g7; \ | |
943 | andn %g7, 0xff, %g7; \ | |
944 | cmp %g3, %g7; \ | |
945 | bne,pt %xcc, 23f; \ | |
946 | nop; \ | |
947 | /* Yep, what we want, capture state. */ \ | |
948 | stx %g2, [%g1 + 0x40]; \ | |
949 | stx %g7, [%g1 + 0x48]; \ | |
950 | add %g2, (1 << 3), %g2; \ | |
951 | ldxa [%g2] ASI_IC_TAG, %g7; \ | |
952 | add %g2, (1 << 3), %g2; \ | |
953 | stx %g7, [%g1 + 0x50]; \ | |
954 | ldxa [%g2] ASI_IC_TAG, %g7; \ | |
955 | add %g2, (1 << 3), %g2; \ | |
956 | stx %g7, [%g1 + 0x60]; \ | |
957 | ldxa [%g2] ASI_IC_TAG, %g7; \ | |
958 | stx %g7, [%g1 + 0x68]; \ | |
959 | sub %g2, (3 << 3), %g2; \ | |
960 | ldxa [%g2] ASI_IC_STAG, %g7; \ | |
961 | stx %g7, [%g1 + 0x58]; \ | |
962 | clr %g3; \ | |
963 | srlx %g2, 2, %g2; \ | |
964 | 22: ldxa [%g2 + %g3] ASI_IC_INSTR, %g7; \ | |
965 | stx %g7, [%g1]; \ | |
966 | add %g3, (1 << 3), %g3; \ | |
967 | cmp %g3, (8 << 3); \ | |
968 | bl,pt %xcc, 22b; \ | |
969 | add %g1, 0x8, %g1; \ | |
970 | ba,pt %xcc, 30f; \ | |
971 | add %g1, 0x30, %g1; \ | |
972 | 23: sethi %hi(1 << 14), %g7; \ | |
973 | add %g2, %g7, %g2; \ | |
974 | srlx %g2, 14, %g7; \ | |
975 | cmp %g7, 4; \ | |
976 | bl,pt %xcc, 21b; \ | |
977 | nop; \ | |
978 | add %g1, 0x70, %g1; \ | |
979 | 30: /* %g1 now points to E-cache logging area */ \ | |
980 | andn %g5, (32 - 1), %g2; /* E-cache subblock */ \ | |
981 | stx %g2, [%g1 + 0x20]; \ | |
982 | ldxa [%g2] ASI_EC_TAG_DATA, %g7; \ | |
983 | stx %g7, [%g1 + 0x28]; \ | |
984 | ldxa [%g2] ASI_EC_R, %g0; \ | |
985 | clr %g3; \ | |
986 | 31: ldxa [%g3] ASI_EC_DATA, %g7; \ | |
987 | stx %g7, [%g1 + %g3]; \ | |
988 | add %g3, 0x8, %g3; \ | |
989 | cmp %g3, 0x20; \ | |
990 | bl,pt %xcc, 31b; \ | |
991 | nop; \ | |
992 | 80: /* DONE */ | |
993 | ||
994 | /* These get patched into the trap table at boot time | |
995 | * once we know we have a cheetah processor. | |
996 | */ | |
	/* Cheetah fast-ECC error trap vectors (patched into the trap table
	 * at boot once a Cheetah CPU is detected).  Each vector is one
	 * fixed-size trap-table slot (8 instructions).  Both flavors turn
	 * off the I-cache (so the corrupted line can be captured) and the
	 * D-cache (it may hold corrupt data) via the DCU control register,
	 * then tail-jump to cheetah_fast_ecc with:
	 *   %g1 = 0  trapped at %tl == 0
	 *   %g1 = 1  trapped at %tl >= 1
	 */
	.globl cheetah_fecc_trap_vector, cheetah_fecc_trap_vector_tl1
cheetah_fecc_trap_vector:
	membar #Sync
	ldxa [%g0] ASI_DCU_CONTROL_REG, %g1
	andn %g1, DCU_DC | DCU_IC, %g1		! clear D$ and I$ enable bits
	stxa %g1, [%g0] ASI_DCU_CONTROL_REG
	membar #Sync
	sethi %hi(cheetah_fast_ecc), %g2
	jmpl %g2 + %lo(cheetah_fast_ecc), %g0
	 mov 0, %g1				! delay slot: trapped at TL=0
cheetah_fecc_trap_vector_tl1:
	membar #Sync
	ldxa [%g0] ASI_DCU_CONTROL_REG, %g1
	andn %g1, DCU_DC | DCU_IC, %g1
	stxa %g1, [%g0] ASI_DCU_CONTROL_REG
	membar #Sync
	sethi %hi(cheetah_fast_ecc), %g2
	jmpl %g2 + %lo(cheetah_fast_ecc), %g0
	 mov 1, %g1				! delay slot: trapped at TL>=1
	/* Correctable-ECC error trap vectors.  Only the I-cache is
	 * disabled here; the error is correctable so the D-cache contents
	 * remain usable.  Same %g1 convention as above.
	 */
	.globl cheetah_cee_trap_vector, cheetah_cee_trap_vector_tl1
cheetah_cee_trap_vector:
	membar #Sync
	ldxa [%g0] ASI_DCU_CONTROL_REG, %g1
	andn %g1, DCU_IC, %g1			! clear I$ enable only
	stxa %g1, [%g0] ASI_DCU_CONTROL_REG
	membar #Sync
	sethi %hi(cheetah_cee), %g2
	jmpl %g2 + %lo(cheetah_cee), %g0
	 mov 0, %g1
cheetah_cee_trap_vector_tl1:
	membar #Sync
	ldxa [%g0] ASI_DCU_CONTROL_REG, %g1
	andn %g1, DCU_IC, %g1
	stxa %g1, [%g0] ASI_DCU_CONTROL_REG
	membar #Sync
	sethi %hi(cheetah_cee), %g2
	jmpl %g2 + %lo(cheetah_cee), %g0
	 mov 1, %g1
	/* Deferred-error trap vectors: both caches are disabled, as in
	 * the fast-ECC case.  (Trailing ';' separators preserved from
	 * the original source; they are statement separators, not
	 * comments, in SPARC GAS.)
	 */
	.globl cheetah_deferred_trap_vector, cheetah_deferred_trap_vector_tl1
cheetah_deferred_trap_vector:
	membar #Sync
	ldxa [%g0] ASI_DCU_CONTROL_REG, %g1;
	andn %g1, DCU_DC | DCU_IC, %g1;
	stxa %g1, [%g0] ASI_DCU_CONTROL_REG;
	membar #Sync;
	sethi %hi(cheetah_deferred_trap), %g2
	jmpl %g2 + %lo(cheetah_deferred_trap), %g0
	 mov 0, %g1
cheetah_deferred_trap_vector_tl1:
	membar #Sync;
	ldxa [%g0] ASI_DCU_CONTROL_REG, %g1;
	andn %g1, DCU_DC | DCU_IC, %g1;
	stxa %g1, [%g0] ASI_DCU_CONTROL_REG;
	membar #Sync;
	sethi %hi(cheetah_deferred_trap), %g2
	jmpl %g2 + %lo(cheetah_deferred_trap), %g0
	 mov 1, %g1
1054 | ||
1055 | /* Cheetah+ specific traps. These are for the new I/D cache parity | |
1056 | * error traps. The first argument to cheetah_plus_parity_handler | |
1057 | * is encoded as follows: | |
1058 | * | |
1059 | * Bit0: 0=dcache,1=icache | |
1060 | * Bit1: 0=recoverable,1=unrecoverable | |
1061 | */ | |
	/* Cheetah+ D-cache parity error vectors.  The TL=0 flavor pads its
	 * trap-table slot with nops and branches to a trampoline that
	 * enters etrap and reports the error; the TL1 flavor switches to
	 * the interrupt globals and hands off to the do_dcpe_tl1 recovery
	 * code.
	 */
	.globl cheetah_plus_dcpe_trap_vector, cheetah_plus_dcpe_trap_vector_tl1
cheetah_plus_dcpe_trap_vector:
	membar #Sync
	sethi %hi(do_cheetah_plus_data_parity), %g7
	jmpl %g7 + %lo(do_cheetah_plus_data_parity), %g0
	 nop					! delay slot
	nop					! pad trap-table slot
	nop
	nop
	nop

do_cheetah_plus_data_parity:
	/* Build a trapframe and call
	 * cheetah_plus_parity_error(0x0, regs): Bit0=0 (dcache),
	 * Bit1=0 (recoverable) per the encoding documented above.
	 */
	ba,pt %xcc, etrap
	 rd %pc, %g7				! delay slot: %g7 = return pc for etrap
	mov 0x0, %o0
	call cheetah_plus_parity_error
	 add %sp, PTREGS_OFF, %o1		! delay slot: %o1 = pt_regs
	ba,pt %xcc, rtrap
	 clr %l6

cheetah_plus_dcpe_trap_vector_tl1:
	membar #Sync
	wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate	! switch to interrupt globals
	sethi %hi(do_dcpe_tl1), %g3
	jmpl %g3 + %lo(do_dcpe_tl1), %g0
	 nop					! delay slot
	nop					! pad trap-table slot
	nop
	nop

	/* I-cache parity error vectors; same structure as the D-cache
	 * pair above, but with argument 0x1 (icache, recoverable).
	 */
	.globl cheetah_plus_icpe_trap_vector, cheetah_plus_icpe_trap_vector_tl1
cheetah_plus_icpe_trap_vector:
	membar #Sync
	sethi %hi(do_cheetah_plus_insn_parity), %g7
	jmpl %g7 + %lo(do_cheetah_plus_insn_parity), %g0
	 nop
	nop
	nop
	nop
	nop

do_cheetah_plus_insn_parity:
	/* Trapframe + cheetah_plus_parity_error(0x1, regs). */
	ba,pt %xcc, etrap
	 rd %pc, %g7
	mov 0x1, %o0
	call cheetah_plus_parity_error
	 add %sp, PTREGS_OFF, %o1
	ba,pt %xcc, rtrap
	 clr %l6

cheetah_plus_icpe_trap_vector_tl1:
	membar #Sync
	wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate
	sethi %hi(do_icpe_tl1), %g3
	jmpl %g3 + %lo(do_icpe_tl1), %g0
	 nop
	nop
	nop
	nop
1122 | /* If we take one of these traps when tl >= 1, then we | |
1123 | * jump to interrupt globals. If some trap level above us | |
1124 | * was also using interrupt globals, we cannot recover. | |
1125 | * We may use all interrupt global registers except %g6. | |
1126 | */ | |
	.globl do_dcpe_tl1, do_icpe_tl1
do_dcpe_tl1:
	/* TL>=1 D-cache parity recovery.  First walk every trap level
	 * from 1..%tl and verify none of them was already using the
	 * interrupt globals (TSTATE_IG); if one was, our register state
	 * is unrecoverable and we punt to the fatal path.
	 */
	rdpr %tl, %g1			! Save original trap level
	mov 1, %g2			! Setup TSTATE checking loop
	sethi %hi(TSTATE_IG), %g3	! TSTATE mask bit
1:	wrpr %g2, %tl			! Set trap level to check
	rdpr %tstate, %g4		! Read TSTATE for this level
	andcc %g4, %g3, %g0		! Interrupt globals in use?
	bne,a,pn %xcc, do_dcpe_tl1_fatal	! Yep, irrecoverable
	 wrpr %g1, %tl			! (annulled delay slot: restore %tl only if taken)
	add %g2, 1, %g2			! Next trap level
	cmp %g2, %g1			! Hit them all yet?
	ble,pt %icc, 1b			! Not yet
	 nop
	wrpr %g1, %tl			! Restore original trap level
do_dcpe_tl1_nonfatal:	/* Ok we may use interrupt globals safely. */
	/* Reset D-cache parity: for each 32-byte line of the 64KB
	 * D-cache, rewrite the utag (with the required membar #Sync
	 * before and after) and zero every 64-bit data word so parity
	 * is regenerated.
	 */
	sethi %hi(1 << 16), %g1		! D-cache size
	mov (1 << 5), %g2		! D-cache line size
	sub %g1, %g2, %g1		! Move down 1 cacheline
1:	srl %g1, 14, %g3		! Compute UTAG
	membar #Sync
	stxa %g3, [%g1] ASI_DCACHE_UTAG
	membar #Sync
	sub %g2, 8, %g3			! 64-bit data word within line
2:	membar #Sync
	stxa %g0, [%g1 + %g3] ASI_DCACHE_DATA
	membar #Sync
	subcc %g3, 8, %g3		! Next 64-bit data word
	bge,pt %icc, 2b
	 nop
	subcc %g1, %g2, %g1		! Next cacheline
	bge,pt %icc, 1b
	 nop
	ba,pt %xcc, dcpe_icpe_tl1_common
	 nop

do_dcpe_tl1_fatal:
	/* Unrecoverable: build a TL1 trapframe and report with arg 0x2
	 * (Bit0=0 dcache, Bit1=1 unrecoverable).
	 */
	sethi %hi(1f), %g7
	ba,pt %xcc, etraptl1
1:	 or %g7, %lo(1b), %g7		! delay slot doubles as the return label
	mov 0x2, %o0
	call cheetah_plus_parity_error
	 add %sp, PTREGS_OFF, %o1
	ba,pt %xcc, rtrap
	 clr %l6

do_icpe_tl1:
	/* TL>=1 I-cache parity recovery; identical TSTATE_IG scan as
	 * do_dcpe_tl1 above.
	 */
	rdpr %tl, %g1			! Save original trap level
	mov 1, %g2			! Setup TSTATE checking loop
	sethi %hi(TSTATE_IG), %g3	! TSTATE mask bit
1:	wrpr %g2, %tl			! Set trap level to check
	rdpr %tstate, %g4		! Read TSTATE for this level
	andcc %g4, %g3, %g0		! Interrupt globals in use?
	bne,a,pn %xcc, do_icpe_tl1_fatal	! Yep, irrecoverable
	 wrpr %g1, %tl			! (annulled delay slot: restore %tl only if taken)
	add %g2, 1, %g2			! Next trap level
	cmp %g2, %g1			! Hit them all yet?
	ble,pt %icc, 1b			! Not yet
	 nop
	wrpr %g1, %tl			! Restore original trap level
do_icpe_tl1_nonfatal:	/* Ok we may use interrupt globals safely. */
	/* Flush I-cache: invalidate the tag of every 32-byte line of
	 * the 32KB I-cache.
	 */
	sethi %hi(1 << 15), %g1		! I-cache size
	mov (1 << 5), %g2		! I-cache line size
	sub %g1, %g2, %g1
1:	or %g1, (2 << 3), %g3		! NOTE(review): selects word 2 of the tag access -- presumably the valid bits; confirm vs. CPU manual
	stxa %g0, [%g3] ASI_IC_TAG
	membar #Sync
	subcc %g1, %g2, %g1
	bge,pt %icc, 1b
	 nop
	ba,pt %xcc, dcpe_icpe_tl1_common
	 nop

do_icpe_tl1_fatal:
	/* Unrecoverable: arg 0x3 (Bit0=1 icache, Bit1=1 unrecoverable). */
	sethi %hi(1f), %g7
	ba,pt %xcc, etraptl1
1:	 or %g7, %lo(1b), %g7
	mov 0x3, %o0
	call cheetah_plus_parity_error
	 add %sp, PTREGS_OFF, %o1
	ba,pt %xcc, rtrap
	 clr %l6

dcpe_icpe_tl1_common:
	/* Flush D-cache, re-enable D/I caches in DCU and finally
	 * retry the trapping instruction.
	 */
	sethi %hi(1 << 16), %g1		! D-cache size
	mov (1 << 5), %g2		! D-cache line size
	sub %g1, %g2, %g1
1:	stxa %g0, [%g1] ASI_DCACHE_TAG	! invalidate line
	membar #Sync
	subcc %g1, %g2, %g1
	bge,pt %icc, 1b
	 nop
	ldxa [%g0] ASI_DCU_CONTROL_REG, %g1
	or %g1, (DCU_DC | DCU_IC), %g1	! turn caches back on
	stxa %g1, [%g0] ASI_DCU_CONTROL_REG
	membar #Sync
	retry
1229 | ||
1230 | /* Cheetah FECC trap handling, we get here from tl{0,1}_fecc | |
1231 | * in the trap table. That code has done a memory barrier | |
1232 | * and has disabled both the I-cache and D-cache in the DCU | |
1233 | * control register. The I-cache is disabled so that we may | |
1234 | * capture the corrupted cache line, and the D-cache is disabled | |
1235 | * because corrupt data may have been placed there and we don't | |
1236 | * want to reference it. | |
1237 | * | |
1238 | * %g1 is one if this trap occurred at %tl >= 1. | |
1239 | * | |
1240 | * Next, we turn off error reporting so that we don't recurse. | |
1241 | */ | |
	.globl cheetah_fast_ecc
cheetah_fast_ecc:
	/* Entered with I/D caches disabled (see the trap vectors and the
	 * comment above).  %g1 = 1 if we trapped at %tl >= 1.  First mask
	 * off both uncorrectable (NCEEN) and correctable (CEEN) error
	 * reporting so handling this error cannot recurse.
	 */
	ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2
	andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2
	stxa %g2, [%g0] ASI_ESTATE_ERROR_EN
	membar #Sync

	/* Fetch and clear AFSR/AFAR */
	ldxa [%g0] ASI_AFSR, %g4	! %g4 = AFSR
	ldxa [%g0] ASI_AFAR, %g5	! %g5 = AFAR (CHEETAH_LOG_ERROR reads %g5)
	stxa %g4, [%g0] ASI_AFSR	! write-back clears the sticky bits
	membar #Sync

	CHEETAH_LOG_ERROR

	/* Block interrupts (PIL 15), build an irq trapframe, then call
	 * cheetah_fecc_handler(regs, afsr, afar).
	 */
	rdpr %pil, %g2
	wrpr %g0, 15, %pil
	ba,pt %xcc, etrap_irq
	 rd %pc, %g7			! delay slot: %g7 = return pc for etrap_irq
	mov %l4, %o1			! AFSR -- presumably staged into %l4 across etrap_irq; TODO confirm
	mov %l5, %o2			! AFAR -- presumably staged into %l5; TODO confirm
	call cheetah_fecc_handler
	 add %sp, PTREGS_OFF, %o0
	ba,a,pt %xcc, rtrap_irq

	/* Our caller has disabled I-cache and performed membar Sync. */
	.globl cheetah_cee
cheetah_cee:
	/* Correctable error: only CEEN is masked while we handle it. */
	ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2
	andn %g2, ESTATE_ERROR_CEEN, %g2
	stxa %g2, [%g0] ASI_ESTATE_ERROR_EN
	membar #Sync

	/* Fetch and clear AFSR/AFAR */
	ldxa [%g0] ASI_AFSR, %g4	! %g4 = AFSR
	ldxa [%g0] ASI_AFAR, %g5	! %g5 = AFAR
	stxa %g4, [%g0] ASI_AFSR
	membar #Sync

	CHEETAH_LOG_ERROR

	rdpr %pil, %g2
	wrpr %g0, 15, %pil
	ba,pt %xcc, etrap_irq
	 rd %pc, %g7
	mov %l4, %o1			! AFSR (see note in cheetah_fast_ecc)
	mov %l5, %o2			! AFAR
	call cheetah_cee_handler
	 add %sp, PTREGS_OFF, %o0
	ba,a,pt %xcc, rtrap_irq

	/* Our caller has disabled I-cache+D-cache and performed membar Sync. */
	.globl cheetah_deferred_trap
cheetah_deferred_trap:
	/* Deferred error: mask both NCEEN and CEEN, log, and dispatch to
	 * cheetah_deferred_handler(regs, afsr, afar).
	 */
	ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2
	andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2
	stxa %g2, [%g0] ASI_ESTATE_ERROR_EN
	membar #Sync

	/* Fetch and clear AFSR/AFAR */
	ldxa [%g0] ASI_AFSR, %g4	! %g4 = AFSR
	ldxa [%g0] ASI_AFAR, %g5	! %g5 = AFAR
	stxa %g4, [%g0] ASI_AFSR
	membar #Sync

	CHEETAH_LOG_ERROR

	rdpr %pil, %g2
	wrpr %g0, 15, %pil
	ba,pt %xcc, etrap_irq
	 rd %pc, %g7
	mov %l4, %o1			! AFSR (see note in cheetah_fast_ecc)
	mov %l5, %o2			! AFAR
	call cheetah_deferred_handler
	 add %sp, PTREGS_OFF, %o0
	ba,a,pt %xcc, rtrap_irq
1318 | ||
	.globl __do_privact
__do_privact:
	/* Privileged-action trap: clear the D-MMU fault status, build a
	 * trapframe, and call do_privact(regs).
	 */
	mov TLB_SFSR, %g3
	stxa %g0, [%g3] ASI_DMMU	! Clear FaultValid bit
	membar #Sync
	sethi %hi(109f), %g7
	ba,pt %xcc, etrap
109:	 or %g7, %lo(109b), %g7		! delay slot doubles as etrap's return label
	call do_privact
	 add %sp, PTREGS_OFF, %o0
	ba,pt %xcc, rtrap
	 clr %l6

	.globl do_mna
do_mna:
	/* Memory-address-not-aligned trap.  The %tl comparison result
	 * (set here, tested after the MMU reads below) routes a nested
	 * trap (%tl > 1, i.e. inside window spill/fill handling) to the
	 * winfix_mna path instead of a normal trapframe.
	 */
	rdpr %tl, %g3
	cmp %g3, 1

	/* Setup %g4/%g5 now as they are used in the
	 * winfixup code.
	 */
	mov TLB_SFSR, %g3
	mov DMMU_SFAR, %g4
	ldxa [%g4] ASI_DMMU, %g4	! %g4 = fault address (SFAR)
	ldxa [%g3] ASI_DMMU, %g5	! %g5 = fault status (SFSR)
	stxa %g0, [%g3] ASI_DMMU	! Clear FaultValid bit
	membar #Sync
	bgu,pn %icc, winfix_mna		! taken when %tl > 1 (cmp above)
	 rdpr %tpc, %g3			! delay slot: %g3 = trapping pc for winfixup

1:	sethi %hi(109f), %g7
	ba,pt %xcc, etrap
109:	 or %g7, %lo(109b), %g7
	mov %l4, %o1			! SFSR/SFAR args -- presumably staged into %l4/%l5 by etrap; TODO confirm
	mov %l5, %o2
	call mem_address_unaligned
	 add %sp, PTREGS_OFF, %o0
	ba,pt %xcc, rtrap
	 clr %l6

	.globl do_lddfmna
do_lddfmna:
	/* Unaligned FP load (lddf): collect and clear SFSR, read SFAR,
	 * then hand off to handle_lddfmna(regs, sfsr, sfar) via etrap.
	 */
	sethi %hi(109f), %g7
	mov TLB_SFSR, %g4
	ldxa [%g4] ASI_DMMU, %g5	! %g5 = SFSR
	stxa %g0, [%g4] ASI_DMMU	! Clear FaultValid bit
	membar #Sync
	mov DMMU_SFAR, %g4
	ldxa [%g4] ASI_DMMU, %g4	! %g4 = SFAR
	ba,pt %xcc, etrap
109:	 or %g7, %lo(109b), %g7
	mov %l4, %o1
	mov %l5, %o2
	call handle_lddfmna
	 add %sp, PTREGS_OFF, %o0
	ba,pt %xcc, rtrap
	 clr %l6

	.globl do_stdfmna
do_stdfmna:
	/* Unaligned FP store (stdf): mirror of do_lddfmna. */
	sethi %hi(109f), %g7
	mov TLB_SFSR, %g4
	ldxa [%g4] ASI_DMMU, %g5	! %g5 = SFSR
	stxa %g0, [%g4] ASI_DMMU	! Clear FaultValid bit
	membar #Sync
	mov DMMU_SFAR, %g4
	ldxa [%g4] ASI_DMMU, %g4	! %g4 = SFAR
	ba,pt %xcc, etrap
109:	 or %g7, %lo(109b), %g7
	mov %l4, %o1
	mov %l5, %o2
	call handle_stdfmna
	 add %sp, PTREGS_OFF, %o0
	ba,pt %xcc, rtrap
	 clr %l6

	.globl breakpoint_trap
breakpoint_trap:
	/* Already inside a trapframe here; just call
	 * sparc_breakpoint(regs) and return via rtrap.
	 */
	call sparc_breakpoint
	 add %sp, PTREGS_OFF, %o0
	ba,pt %xcc, rtrap
	 nop
1401 | ||
#if defined(CONFIG_SUNOS_EMUL) || defined(CONFIG_SOLARIS_EMUL) || \
    defined(CONFIG_SOLARIS_EMUL_MODULE)
	/* SunOS uses syscall zero as the 'indirect syscall' it looks
	 * like indir_syscall(scall_num, arg0, arg1, arg2...); etc.
	 * This is complete brain damage.
	 */
	.globl sunos_indir
sunos_indir:
	srl %o0, 0, %o0			! zero-extend the 32-bit syscall number
	mov %o7, %l4			! remember the real return address
	cmp %o0, NR_SYSCALLS
	blu,a,pt %icc, 1f		! annulled: sll executes only when in range
	 sll %o0, 0x2, %o0		! table index = nr * 4
	sethi %hi(sunos_nosys), %l6	! out-of-range -> sunos_nosys
	b,pt %xcc, 2f
	 or %l6, %lo(sunos_nosys), %l6
1:	sethi %hi(sunos_sys_table), %l7
	or %l7, %lo(sunos_sys_table), %l7
	lduw [%l7 + %o0], %l6		! %l6 = real handler from the table
2:	mov %o1, %o0			! shift the arguments down one slot
	mov %o2, %o1
	mov %o3, %o2
	mov %o4, %o3
	mov %o5, %o4
	call %l6			! tail-call: delay slot restores %o7 so the
	 mov %l4, %o7			! handler returns straight to our caller

	/* SunOS getpid() returns pid in %o0 and ppid in %o1.  The delay
	 * slot of the second call stores the getppid() result into the
	 * saved %i1 slot before sys_getpid runs; the final delay slot
	 * stores the pid into the %i0 slot.
	 */
	.globl sunos_getpid
sunos_getpid:
	call sys_getppid
	 nop
	call sys_getpid
	 stx %o0, [%sp + PTREGS_OFF + PT_V9_I1]
	b,pt %xcc, ret_sys_call
	 stx %o0, [%sp + PTREGS_OFF + PT_V9_I0]

	/* SunOS getuid() returns uid in %o0 and euid in %o1 */
	.globl sunos_getuid
sunos_getuid:
	call sys32_geteuid16
	 nop
	call sys32_getuid16
	 stx %o0, [%sp + PTREGS_OFF + PT_V9_I1]
	b,pt %xcc, ret_sys_call
	 stx %o0, [%sp + PTREGS_OFF + PT_V9_I0]

	/* SunOS getgid() returns gid in %o0 and egid in %o1 */
	.globl sunos_getgid
sunos_getgid:
	call sys32_getegid16
	 nop
	call sys32_getgid16
	 stx %o0, [%sp + PTREGS_OFF + PT_V9_I1]
	b,pt %xcc, ret_sys_call
	 stx %o0, [%sp + PTREGS_OFF + PT_V9_I0]
#endif
1458 | ||
1459 | /* SunOS's execv() call only specifies the argv argument, the | |
1460 | * environment settings are the same as the calling processes. | |
1461 | */ | |
	/* execve entry points.  Each loads %g1 with the target C routine
	 * and falls/branches into execve_merge, which flushes register
	 * windows and tail-jumps with %o0 = pt_regs.
	 *
	 * NOTE(review): ".globl sunos_execv" sits outside CONFIG_COMPAT
	 * while sunos_execv's definition is inside it, and ".globl
	 * sys_execve" is inside the #ifdef while sys_execve is defined
	 * outside -- the two directives look swapped; verify linkage for
	 * a !CONFIG_COMPAT build.
	 */
	.globl sunos_execv
sys_execve:
	sethi %hi(sparc_execve), %g1
	ba,pt %xcc, execve_merge
	 or %g1, %lo(sparc_execve), %g1	! delay slot completes %g1
#ifdef CONFIG_COMPAT
	.globl sys_execve
sunos_execv:
	stx %g0, [%sp + PTREGS_OFF + PT_V9_I2]	! SunOS execv: force envp arg to NULL
	.globl sys32_execve
sys32_execve:
	sethi %hi(sparc32_execve), %g1
	or %g1, %lo(sparc32_execve), %g1
#endif
execve_merge:
	flushw				! spill all register windows before exec
	jmpl %g1, %g0
	 add %sp, PTREGS_OFF, %o0	! delay slot: %o0 = pt_regs
1480 | ||
	/* Thin system-call entry stubs.  Most branch to a C or
	 * sparc-specific implementation with %o0 (or %o1/%o2) pointing at
	 * the saved pt_regs.  The signal/ptrace stubs instead use "call"
	 * with a doctored return address (add %o7, 1f-.-4, %o7) so that
	 * when the C routine returns, control lands at the shared label
	 * "1:" at the bottom, which handles syscall-exit tracing before
	 * going to rtrap.
	 */
	.globl sys_pipe, sys_sigpause, sys_nis_syscall
	.globl sys_sigsuspend, sys_rt_sigsuspend
	.globl sys_rt_sigreturn
	.globl sys_ptrace
	.globl sys_sigaltstack
	.align 32
sys_pipe:	ba,pt %xcc, sparc_pipe
		 add %sp, PTREGS_OFF, %o0
sys_nis_syscall:ba,pt %xcc, c_sys_nis_syscall
		 add %sp, PTREGS_OFF, %o0
sys_memory_ordering:
		ba,pt %xcc, sparc_memory_ordering
		 add %sp, PTREGS_OFF, %o1	! regs go in %o1 here (first arg already in %o0)
sys_sigaltstack:ba,pt %xcc, do_sigaltstack
		 add %i6, STACK_BIAS, %o2	! %o2 = user stack pointer (de-biased for 64-bit)
#ifdef CONFIG_COMPAT
	.globl sys32_sigstack
sys32_sigstack:	ba,pt %xcc, do_sys32_sigstack
		 mov %i6, %o2			! 32-bit task: %sp is not biased
	.globl sys32_sigaltstack
sys32_sigaltstack:
		ba,pt %xcc, do_sys32_sigaltstack
		 mov %i6, %o2
#endif
	.align 32
sys_sigsuspend:	add %sp, PTREGS_OFF, %o0
		call do_sigsuspend
		 add %o7, 1f-.-4, %o7		! fake return address -> label 1: below
		nop
sys_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */
		add %sp, PTREGS_OFF, %o2
		call do_rt_sigsuspend
		 add %o7, 1f-.-4, %o7
		nop
#ifdef CONFIG_COMPAT
	.globl sys32_rt_sigsuspend
sys32_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */
		srl %o0, 0, %o0			! zero-extend 32-bit user pointer
		add %sp, PTREGS_OFF, %o2
		call do_rt_sigsuspend32
		 add %o7, 1f-.-4, %o7
#endif
	/* NOTE: %o0 has a correct value already */
sys_sigpause:	add %sp, PTREGS_OFF, %o1
		call do_sigpause
		 add %o7, 1f-.-4, %o7
		nop
#ifdef CONFIG_COMPAT
	.globl sys32_sigreturn
sys32_sigreturn:
		add %sp, PTREGS_OFF, %o0
		call do_sigreturn32
		 add %o7, 1f-.-4, %o7
		nop
#endif
sys_rt_sigreturn:
		add %sp, PTREGS_OFF, %o0
		call do_rt_sigreturn
		 add %o7, 1f-.-4, %o7
		nop
#ifdef CONFIG_COMPAT
	.globl sys32_rt_sigreturn
sys32_rt_sigreturn:
		add %sp, PTREGS_OFF, %o0
		call do_rt_sigreturn32
		 add %o7, 1f-.-4, %o7
		nop
#endif
sys_ptrace:	add %sp, PTREGS_OFF, %o0
		call do_ptrace
		 add %o7, 1f-.-4, %o7
		nop
	.align 32
	/* Common return point for the stubs above: if any syscall-exit
	 * tracing flag is set, call syscall_trace(regs, 1) -- second arg
	 * apparently marks "exit" -- then head to rtrap.
	 */
1:	ldx [%curptr + TI_FLAGS], %l5
	andcc %l5, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0
	be,pt %icc, rtrap
	 clr %l6
	add %sp, PTREGS_OFF, %o0
	call syscall_trace
	 mov 1, %o1
	ba,pt %xcc, rtrap
	 clr %l6
1564 | ||
1565 | /* This is how fork() was meant to be done, 8 instruction entry. | |
1566 | * | |
1567 | * I questioned the following code briefly, let me clear things | |
1568 | * up so you must not reason on it like I did. | |
1569 | * | |
1570 | * Know the fork_kpsr etc. we use in the sparc32 port? We don't | |
1571 | * need it here because the only piece of window state we copy to | |
1572 | * the child is the CWP register. Even if the parent sleeps, | |
1573 | * we are safe because we stuck it into pt_regs of the parent | |
1574 | * so it will not change. | |
1575 | * | |
1576 | * XXX This raises the question, whether we can do the same on | |
1577 | * XXX sparc32 to get rid of fork_kpsr _and_ fork_kwim. The | |
1578 | * XXX answer is yes. We stick fork_kpsr in UREG_G0 and | |
1579 | * XXX fork_kwim in UREG_G1 (global registers are considered | |
1580 | * XXX volatile across a system call in the sparc ABI I think | |
1581 | * XXX if it isn't we can use regs->y instead, anyone who depends | |
1582 | * XXX upon the Y register being preserved across a fork deserves | |
1583 | * XXX to lose). | |
1584 | * | |
1585 | * In fact we should take advantage of that fact for other things | |
1586 | * during system calls... | |
1587 | */ | |
	.globl sys_fork, sys_vfork, sys_clone, sparc_exit
	.globl ret_from_syscall
	.align 32
sys_vfork:	/* Under Linux, vfork and fork are just special cases of clone. */
		sethi %hi(0x4000 | 0x0100 | SIGCHLD), %o0	! 0x4000|0x0100 -- presumably CLONE_VFORK|CLONE_VM; TODO confirm against <linux/sched.h>
		or %o0, %lo(0x4000 | 0x0100 | SIGCHLD), %o0
		ba,pt %xcc, sys_clone
sys_fork:	 clr %o1		! doubles as vfork's delay slot: no child stack given
		mov SIGCHLD, %o0
sys_clone:	flushw			! spill windows so child sees consistent state
		movrz %o1, %fp, %o1	! child stack defaults to parent's %fp if zero
		mov 0, %o3
		ba,pt %xcc, sparc_do_fork
		 add %sp, PTREGS_OFF, %o2	! delay slot: %o2 = pt_regs
ret_from_syscall:
		/* Clear SPARC_FLAG_NEWCHILD, switch_to leaves thread.flags in
		 * %o7 for us.  Check performance counter stuff too.
		 */
		andn %o7, _TIF_NEWCHILD, %l0
		stx %l0, [%g6 + TI_FLAGS]
		call schedule_tail
		 mov %g7, %o0		! %g7 = previous task from switch_to -- TODO confirm
		andcc %l0, _TIF_PERFCTR, %g0
		be,pt %icc, 1f
		 nop
		ldx [%g6 + TI_PCR], %o7	! restore the task's perf-counter control
		wr %g0, %o7, %pcr

		/* Blackbird errata workaround.  See commentary in
		 * smp.c:smp_percpu_timer_interrupt() for more
		 * information.
		 */
		ba,pt %xcc, 99f
		 nop
		.align 64
99:		wr %g0, %g0, %pic
		rd %pic, %g0

1:		b,pt %xcc, ret_sys_call
		 ldx [%sp + PTREGS_OFF + PT_V9_I0], %o0	! child's syscall return value
sparc_exit:	/* Fold any remaining user windows (%otherwin) into %cansave
		 * with interrupts disabled, clear wsaved, then enter
		 * sys_exit.  PSTATE is first written without PSTATE_IE
		 * (interrupts off) and rewritten with it afterwards.
		 */
		wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV), %pstate
		rdpr %otherwin, %g1
		rdpr %cansave, %g3
		add %g3, %g1, %g3
		wrpr %g3, 0x0, %cansave
		wrpr %g0, 0x0, %otherwin
		wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV | PSTATE_IE), %pstate
		ba,pt %xcc, sys_exit
		 stb %g0, [%g6 + TI_WSAVED]	! delay slot: thread has no saved windows
1637 | ||
linux_sparc_ni_syscall:
	/* Syscall number out of range: substitute sys_ni_syscall as the
	 * handler (%l7) and rejoin the native entry path at label 4.
	 */
	sethi %hi(sys_ni_syscall), %l7
	b,pt %xcc, 4f
	 or %l7, %lo(sys_ni_syscall), %l7

linux_syscall_trace32:
	/* Syscall-entry tracing for 32-bit tasks: call
	 * syscall_trace(regs, 0), then reload the argument registers
	 * (the tracer may have changed the saved copies is NOT assumed
	 * here -- these reload from the live %i registers), zero-extending
	 * each to strip 32-bit task garbage from the high bits, and
	 * rejoin the dispatch at label 2.
	 */
	add %sp, PTREGS_OFF, %o0
	call syscall_trace
	 clr %o1			! 0 = syscall entry (vs. 1 at exit)
	srl %i0, 0, %o0
	srl %i4, 0, %o4
	srl %i1, 0, %o1
	srl %i2, 0, %o2
	b,pt %xcc, 2f
	 srl %i3, 0, %o3

linux_syscall_trace:
	/* Same as above for 64-bit tasks: no zero-extension needed. */
	add %sp, PTREGS_OFF, %o0
	call syscall_trace
	 clr %o1
	mov %i0, %o0
	mov %i1, %o1
	mov %i2, %o2
	mov %i3, %o3
	b,pt %xcc, 2f
	 mov %i4, %o4
1664 | ||
1665 | ||
	/* Linux 32-bit and SunOS system calls enter here...
	 * On entry: %g1 = syscall number, %l7 = syscall table base
	 * (set up by the trap-table entry -- not visible here), user args
	 * in %i0-%i5.  All six arguments are zero-extended with
	 * "srl reg, 0" since the caller is a 32-bit task.  The original
	 * pipeline-scheduling annotations (IEU0/IEU1/Load/CTI groups)
	 * are preserved.
	 */
	.align 32
	.globl linux_sparc_syscall32
linux_sparc_syscall32:
	/* Direct access to user regs, much faster. */
	cmp %g1, NR_SYSCALLS			! IEU1	Group
	bgeu,pn %xcc, linux_sparc_ni_syscall	! CTI
	 srl %i0, 0, %o0			! IEU0
	sll %g1, 2, %l4				! IEU0	Group  (table offset = nr * 4)
	srl %i4, 0, %o4				! IEU1
	lduw [%l7 + %l4], %l7			! Load  (%l7 now = handler address)
	srl %i1, 0, %o1				! IEU0	Group
	ldx [%curptr + TI_FLAGS], %l0		! Load

	srl %i5, 0, %o5				! IEU1
	srl %i2, 0, %o2				! IEU0	Group
	andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0
	bne,pn %icc, linux_syscall_trace32	! CTI   (tracing active)
	 mov %i0, %l5				! IEU1  (save orig %i0 for restart logic)
	call %l7				! CTI	Group brk forced
	 srl %i3, 0, %o3			! IEU0  (last arg in the delay slot)
	ba,a,pt %xcc, 3f			! join native return path
1688 | ||
	/* Linux native and SunOS system calls enter here...
	 * Same contract as the 32-bit entry: %g1 = syscall number,
	 * %l7 = syscall table base, args in %i0-%i5 (used as-is).
	 */
	.align 32
	.globl linux_sparc_syscall, ret_sys_call
linux_sparc_syscall:
	/* Direct access to user regs, much faster. */
	cmp %g1, NR_SYSCALLS			! IEU1	Group
	bgeu,pn %xcc, linux_sparc_ni_syscall	! CTI
	 mov %i0, %o0				! IEU0
	sll %g1, 2, %l4				! IEU0	Group
	mov %i1, %o1				! IEU1
	lduw [%l7 + %l4], %l7			! Load  (%l7 = handler address)
4:	mov %i2, %o2				! IEU0	Group  (rejoin point for ni_syscall)
	ldx [%curptr + TI_FLAGS], %l0		! Load

	mov %i3, %o3				! IEU1
	mov %i4, %o4				! IEU0	Group
	andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0
	bne,pn %icc, linux_syscall_trace	! CTI	Group
	 mov %i0, %l5				! IEU0
2:	call %l7				! CTI	Group brk forced (rejoin from tracers)
	 mov %i5, %o5				! IEU0
	nop

3:	stx %o0, [%sp + PTREGS_OFF + PT_V9_I0]	! store return value into pt_regs
ret_sys_call:
	/* Common syscall return: translate %o0 into the user-visible
	 * convention -- carry condition code clear = success, carry set =
	 * error with abs(errno) in %o0 -- and advance tpc/tnpc past the
	 * trap instruction.
	 */
	ldx [%sp + PTREGS_OFF + PT_V9_TSTATE], %g3
	ldx [%sp + PTREGS_OFF + PT_V9_TNPC], %l1	! pc = npc
	sra %o0, 0, %o0
	mov %ulo(TSTATE_XCARRY | TSTATE_ICARRY), %g2
	sllx %g2, 32, %g2			! %g2 = carry bits within TSTATE

	/* Check if force_successful_syscall_return()
	 * was invoked.
	 */
	ldx [%curptr + TI_FLAGS], %l0
	andcc %l0, _TIF_SYSCALL_SUCCESS, %g0
	be,pt %icc, 1f
	 andn %l0, _TIF_SYSCALL_SUCCESS, %l0	! delay slot: clear the flag bit
	ba,pt %xcc, 80f
	 stx %l0, [%curptr + TI_FLAGS]
	/* NOTE(review): on this forced-success path the %icc tested by
	 * the bne at 80: still reflects the non-zero SYSCALL_SUCCESS
	 * andcc above, so linux_syscall_trace2 is always taken there
	 * with %l6 not yet set up -- verify this is intended.
	 */

1:
	cmp %o0, -ERESTART_RESTARTBLOCK		! unsigned >= -ERESTART.. means -errno
	bgeu,pn %xcc, 1f
	 andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %l6	! delay slot: %l6/%icc = tracing?

80:
	/* System call success, clear Carry condition code. */
	andn %g3, %g2, %g3
	stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE]
	bne,pn %icc, linux_syscall_trace2	! exit tracing needed
	 add %l1, 0x4, %l2			! npc = npc+4
	stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC]
	ba,pt %xcc, rtrap_clr_l6
	 stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC]

1:
	/* System call failure, set Carry condition code.
	 * Also, get abs(errno) to return to the process.
	 */
	andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %l6
	sub %g0, %o0, %o0			! %o0 = -(-errno) = abs(errno)
	or %g3, %g2, %g3			! set carry bits in TSTATE
	stx %o0, [%sp + PTREGS_OFF + PT_V9_I0]
	mov 1, %l6				! tell rtrap this was an error return
	stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE]
	bne,pn %icc, linux_syscall_trace2
	 add %l1, 0x4, %l2			! npc = npc+4
	stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC]

	b,pt %xcc, rtrap
	 stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC]
linux_syscall_trace2:
	/* Syscall-exit tracing, then finish advancing tpc/tnpc. */
	add %sp, PTREGS_OFF, %o0
	call syscall_trace
	 mov 1, %o1				! 1 = syscall exit
	stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC]
	ba,pt %xcc, rtrap
	 stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC]
1767 | ||
	.align 32
	.globl __flushw_user
__flushw_user:
	/* Flush all of the current task's user register windows to the
	 * stack: keep executing "save" until %otherwin reaches zero
	 * (each save forces a spill of one user window), counting the
	 * saves in %g2, then unwind with the same number of restores so
	 * the CWP ends where it started.  Clobbers %g1, %g2.
	 */
	rdpr %otherwin, %g1
	brz,pn %g1, 2f			! no user windows -> done
	 clr %g2			! delay slot: save counter = 0
1:	save %sp, -128, %sp		! push a window, spilling a user one
	rdpr %otherwin, %g1
	brnz,pt %g1, 1b			! repeat until all are flushed
	 add %g2, 1, %g2		! delay slot: count this save
1:	sub %g2, 1, %g2
	brnz,pt %g2, 1b			! one restore per save performed above
	 restore %g0, %g0, %g0		! delay slot: executes on every iteration
2:	retl
	 nop