Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* $Id: entry.S,v 1.144 2002/02/09 19:49:30 davem Exp $ |
2 | * arch/sparc64/kernel/entry.S: Sparc64 trap low-level entry points. | |
3 | * | |
4 | * Copyright (C) 1995,1997 David S. Miller (davem@caip.rutgers.edu) | |
5 | * Copyright (C) 1996 Eddie C. Dost (ecd@skynet.be) | |
6 | * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx) | |
7 | * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | |
8 | */ | |
9 | ||
10 | #include <linux/config.h> | |
11 | #include <linux/errno.h> | |
12 | ||
13 | #include <asm/head.h> | |
14 | #include <asm/asi.h> | |
15 | #include <asm/smp.h> | |
16 | #include <asm/ptrace.h> | |
17 | #include <asm/page.h> | |
18 | #include <asm/signal.h> | |
19 | #include <asm/pgtable.h> | |
20 | #include <asm/processor.h> | |
21 | #include <asm/visasm.h> | |
22 | #include <asm/estate.h> | |
23 | #include <asm/auxio.h> | |
6c52a96e | 24 | #include <asm/sfafsr.h> |
1da177e4 | 25 | |
1da177e4 LT |
26 | #define curptr g6 |
27 | ||
28 | #define NR_SYSCALLS 284 /* Each OS is different... */ | |
29 | ||
30 | .text | |
31 | .align 32 | |
32 | ||
33 | .globl sparc64_vpte_patchme1 | |
34 | .globl sparc64_vpte_patchme2 | |
35 | /* | |
36 | * On a second level vpte miss, check whether the original fault is to the OBP | |
37 | * range (note that this is only possible for instruction miss, data misses to | |
38 | * obp range do not use vpte). If so, go back directly to the faulting address. | |
39 | * This is because we want to read the tpc, otherwise we have no way of knowing | |
40 | * the 8k aligned faulting address if we are using >8k kernel pagesize. This | |
41 | * also ensures no vpte range addresses are dropped into tlb while obp is | |
42 | * executing (see inherit_locked_prom_mappings() rant). | |
43 | */ | |
44 | sparc64_vpte_nucleus: | |
45 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
46 | mov 0xf, %g5 | |
47 | sllx %g5, 28, %g5 | |
48 | ||
49 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
50 | cmp %g4, %g5 | |
51 | blu,pn %xcc, sparc64_vpte_patchme1 | |
52 | mov 0x1, %g5 | |
53 | ||
54 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
55 | sllx %g5, 32, %g5 | |
56 | ||
57 | /* Is addr < HI_OBP_ADDRESS? */ | |
58 | cmp %g4, %g5 | |
59 | blu,pn %xcc, obp_iaddr_patch | |
60 | nop | |
61 | ||
62 | /* These two instructions are patched by paging_init(). */ | |
63 | sparc64_vpte_patchme1: | |
64 | sethi %hi(0), %g5 | |
65 | sparc64_vpte_patchme2: | |
66 | or %g5, %lo(0), %g5 | |
67 | ||
68 | /* With kernel PGD in %g5, branch back into dtlb_backend. */ | |
69 | ba,pt %xcc, sparc64_kpte_continue | |
70 | andn %g1, 0x3, %g1 /* Finish PMD offset adjustment. */ | |
71 | ||
72 | vpte_noent: | |
73 | /* Restore previous TAG_ACCESS, %g5 is zero, and we will | |
74 | * skip over the trap instruction so that the top level | |
75 | * TLB miss handler will think this %g5 value is just an | |
76 | * invalid PTE, thus branching to full fault processing. | |
77 | */ | |
78 | mov TLB_SFSR, %g1 | |
79 | stxa %g4, [%g1 + %g1] ASI_DMMU | |
80 | done | |
81 | ||
82 | .globl obp_iaddr_patch | |
83 | obp_iaddr_patch: | |
84 | /* These two instructions patched by inherit_prom_mappings(). */ | |
85 | sethi %hi(0), %g5 | |
86 | or %g5, %lo(0), %g5 | |
87 | ||
88 | /* Behave as if we are at TL0. */ | |
89 | wrpr %g0, 1, %tl | |
90 | rdpr %tpc, %g4 /* Find original faulting iaddr */ | |
91 | srlx %g4, 13, %g4 /* Throw out context bits */ | |
92 | sllx %g4, 13, %g4 /* g4 has vpn + ctx0 now */ | |
93 | ||
94 | /* Restore previous TAG_ACCESS. */ | |
95 | mov TLB_SFSR, %g1 | |
96 | stxa %g4, [%g1 + %g1] ASI_IMMU | |
97 | ||
98 | /* Get PMD offset. */ | |
99 | srlx %g4, 23, %g6 | |
100 | and %g6, 0x7ff, %g6 | |
101 | sllx %g6, 2, %g6 | |
102 | ||
103 | /* Load PMD, is it valid? */ | |
104 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
105 | brz,pn %g5, longpath | |
106 | sllx %g5, 11, %g5 | |
107 | ||
108 | /* Get PTE offset. */ | |
109 | srlx %g4, 13, %g6 | |
110 | and %g6, 0x3ff, %g6 | |
111 | sllx %g6, 3, %g6 | |
112 | ||
113 | /* Load PTE. */ | |
114 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
115 | brgez,pn %g5, longpath | |
116 | nop | |
117 | ||
118 | /* TLB load and return from trap. */ | |
119 | stxa %g5, [%g0] ASI_ITLB_DATA_IN | |
120 | retry | |
121 | ||
122 | .globl obp_daddr_patch | |
123 | obp_daddr_patch: | |
124 | /* These two instructions patched by inherit_prom_mappings(). */ | |
125 | sethi %hi(0), %g5 | |
126 | or %g5, %lo(0), %g5 | |
127 | ||
128 | /* Get PMD offset. */ | |
129 | srlx %g4, 23, %g6 | |
130 | and %g6, 0x7ff, %g6 | |
131 | sllx %g6, 2, %g6 | |
132 | ||
133 | /* Load PMD, is it valid? */ | |
134 | lduwa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
135 | brz,pn %g5, longpath | |
136 | sllx %g5, 11, %g5 | |
137 | ||
138 | /* Get PTE offset. */ | |
139 | srlx %g4, 13, %g6 | |
140 | and %g6, 0x3ff, %g6 | |
141 | sllx %g6, 3, %g6 | |
142 | ||
143 | /* Load PTE. */ | |
144 | ldxa [%g5 + %g6] ASI_PHYS_USE_EC, %g5 | |
145 | brgez,pn %g5, longpath | |
146 | nop | |
147 | ||
148 | /* TLB load and return from trap. */ | |
149 | stxa %g5, [%g0] ASI_DTLB_DATA_IN | |
150 | retry | |
151 | ||
152 | /* | |
153 | * On a first level data miss, check whether this is to the OBP range (note | |
154 | * that such accesses can be made by prom, as well as by kernel using | |
155 | * prom_getproperty on "address"), and if so, do not use vpte access ... | |
156 | * rather, use information saved during inherit_prom_mappings() using 8k | |
157 | * pagesize. | |
158 | */ | |
159 | kvmap: | |
160 | /* Load 0xf0000000, which is LOW_OBP_ADDRESS. */ | |
161 | mov 0xf, %g5 | |
162 | sllx %g5, 28, %g5 | |
163 | ||
164 | /* Is addr >= LOW_OBP_ADDRESS? */ | |
165 | cmp %g4, %g5 | |
166 | blu,pn %xcc, vmalloc_addr | |
167 | mov 0x1, %g5 | |
168 | ||
169 | /* Load 0x100000000, which is HI_OBP_ADDRESS. */ | |
170 | sllx %g5, 32, %g5 | |
171 | ||
172 | /* Is addr < HI_OBP_ADDRESS? */ | |
173 | cmp %g4, %g5 | |
174 | blu,pn %xcc, obp_daddr_patch | |
175 | nop | |
176 | ||
177 | vmalloc_addr: | |
178 | /* If we get here, a vmalloc addr accessed, load kernel VPTE. */ | |
179 | ldxa [%g3 + %g6] ASI_N, %g5 | |
180 | brgez,pn %g5, longpath | |
181 | nop | |
182 | ||
183 | /* PTE is valid, load into TLB and return from trap. */ | |
184 | stxa %g5, [%g0] ASI_DTLB_DATA_IN ! Reload TLB | |
185 | retry | |
186 | ||
187 | /* This is trivial with the new code... */ | |
188 | .globl do_fpdis | |
189 | do_fpdis: | |
190 | sethi %hi(TSTATE_PEF), %g4 ! IEU0 | |
191 | rdpr %tstate, %g5 | |
192 | andcc %g5, %g4, %g0 | |
193 | be,pt %xcc, 1f | |
194 | nop | |
195 | rd %fprs, %g5 | |
196 | andcc %g5, FPRS_FEF, %g0 | |
197 | be,pt %xcc, 1f | |
198 | nop | |
199 | ||
200 | /* Legal state when DCR_IFPOE is set in Cheetah %dcr. */ | |
201 | sethi %hi(109f), %g7 | |
202 | ba,pt %xcc, etrap | |
203 | 109: or %g7, %lo(109b), %g7 | |
204 | add %g0, %g0, %g0 | |
205 | ba,a,pt %xcc, rtrap_clr_l6 | |
206 | ||
207 | 1: ldub [%g6 + TI_FPSAVED], %g5 ! Load Group | |
208 | wr %g0, FPRS_FEF, %fprs ! LSU Group+4bubbles | |
209 | andcc %g5, FPRS_FEF, %g0 ! IEU1 Group | |
210 | be,a,pt %icc, 1f ! CTI | |
211 | clr %g7 ! IEU0 | |
212 | ldx [%g6 + TI_GSR], %g7 ! Load Group | |
213 | 1: andcc %g5, FPRS_DL, %g0 ! IEU1 | |
214 | bne,pn %icc, 2f ! CTI | |
215 | fzero %f0 ! FPA | |
216 | andcc %g5, FPRS_DU, %g0 ! IEU1 Group | |
217 | bne,pn %icc, 1f ! CTI | |
218 | fzero %f2 ! FPA | |
219 | faddd %f0, %f2, %f4 | |
220 | fmuld %f0, %f2, %f6 | |
221 | faddd %f0, %f2, %f8 | |
222 | fmuld %f0, %f2, %f10 | |
223 | faddd %f0, %f2, %f12 | |
224 | fmuld %f0, %f2, %f14 | |
225 | faddd %f0, %f2, %f16 | |
226 | fmuld %f0, %f2, %f18 | |
227 | faddd %f0, %f2, %f20 | |
228 | fmuld %f0, %f2, %f22 | |
229 | faddd %f0, %f2, %f24 | |
230 | fmuld %f0, %f2, %f26 | |
231 | faddd %f0, %f2, %f28 | |
232 | fmuld %f0, %f2, %f30 | |
233 | faddd %f0, %f2, %f32 | |
234 | fmuld %f0, %f2, %f34 | |
235 | faddd %f0, %f2, %f36 | |
236 | fmuld %f0, %f2, %f38 | |
237 | faddd %f0, %f2, %f40 | |
238 | fmuld %f0, %f2, %f42 | |
239 | faddd %f0, %f2, %f44 | |
240 | fmuld %f0, %f2, %f46 | |
241 | faddd %f0, %f2, %f48 | |
242 | fmuld %f0, %f2, %f50 | |
243 | faddd %f0, %f2, %f52 | |
244 | fmuld %f0, %f2, %f54 | |
245 | faddd %f0, %f2, %f56 | |
246 | fmuld %f0, %f2, %f58 | |
247 | b,pt %xcc, fpdis_exit2 | |
248 | faddd %f0, %f2, %f60 | |
249 | 1: mov SECONDARY_CONTEXT, %g3 | |
250 | add %g6, TI_FPREGS + 0x80, %g1 | |
251 | faddd %f0, %f2, %f4 | |
252 | fmuld %f0, %f2, %f6 | |
253 | ldxa [%g3] ASI_DMMU, %g5 | |
254 | cplus_fptrap_insn_1: | |
255 | sethi %hi(0), %g2 | |
256 | stxa %g2, [%g3] ASI_DMMU | |
257 | membar #Sync | |
258 | add %g6, TI_FPREGS + 0xc0, %g2 | |
259 | faddd %f0, %f2, %f8 | |
260 | fmuld %f0, %f2, %f10 | |
261 | ldda [%g1] ASI_BLK_S, %f32 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
262 | ldda [%g2] ASI_BLK_S, %f48 | |
263 | faddd %f0, %f2, %f12 | |
264 | fmuld %f0, %f2, %f14 | |
265 | faddd %f0, %f2, %f16 | |
266 | fmuld %f0, %f2, %f18 | |
267 | faddd %f0, %f2, %f20 | |
268 | fmuld %f0, %f2, %f22 | |
269 | faddd %f0, %f2, %f24 | |
270 | fmuld %f0, %f2, %f26 | |
271 | faddd %f0, %f2, %f28 | |
272 | fmuld %f0, %f2, %f30 | |
b445e26c | 273 | membar #Sync |
1da177e4 | 274 | b,pt %xcc, fpdis_exit |
b445e26c | 275 | nop |
1da177e4 LT |
276 | 2: andcc %g5, FPRS_DU, %g0 |
277 | bne,pt %icc, 3f | |
278 | fzero %f32 | |
279 | mov SECONDARY_CONTEXT, %g3 | |
280 | fzero %f34 | |
281 | ldxa [%g3] ASI_DMMU, %g5 | |
282 | add %g6, TI_FPREGS, %g1 | |
283 | cplus_fptrap_insn_2: | |
284 | sethi %hi(0), %g2 | |
285 | stxa %g2, [%g3] ASI_DMMU | |
286 | membar #Sync | |
287 | add %g6, TI_FPREGS + 0x40, %g2 | |
288 | faddd %f32, %f34, %f36 | |
289 | fmuld %f32, %f34, %f38 | |
290 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
291 | ldda [%g2] ASI_BLK_S, %f16 | |
292 | faddd %f32, %f34, %f40 | |
293 | fmuld %f32, %f34, %f42 | |
294 | faddd %f32, %f34, %f44 | |
295 | fmuld %f32, %f34, %f46 | |
296 | faddd %f32, %f34, %f48 | |
297 | fmuld %f32, %f34, %f50 | |
298 | faddd %f32, %f34, %f52 | |
299 | fmuld %f32, %f34, %f54 | |
300 | faddd %f32, %f34, %f56 | |
301 | fmuld %f32, %f34, %f58 | |
302 | faddd %f32, %f34, %f60 | |
303 | fmuld %f32, %f34, %f62 | |
b445e26c | 304 | membar #Sync |
1da177e4 | 305 | ba,pt %xcc, fpdis_exit |
b445e26c | 306 | nop |
1da177e4 LT |
307 | 3: mov SECONDARY_CONTEXT, %g3 |
308 | add %g6, TI_FPREGS, %g1 | |
309 | ldxa [%g3] ASI_DMMU, %g5 | |
310 | cplus_fptrap_insn_3: | |
311 | sethi %hi(0), %g2 | |
312 | stxa %g2, [%g3] ASI_DMMU | |
313 | membar #Sync | |
314 | mov 0x40, %g2 | |
315 | ldda [%g1] ASI_BLK_S, %f0 ! grrr, where is ASI_BLK_NUCLEUS 8-( | |
316 | ldda [%g1 + %g2] ASI_BLK_S, %f16 | |
317 | add %g1, 0x80, %g1 | |
318 | ldda [%g1] ASI_BLK_S, %f32 | |
319 | ldda [%g1 + %g2] ASI_BLK_S, %f48 | |
320 | membar #Sync | |
321 | fpdis_exit: | |
322 | stxa %g5, [%g3] ASI_DMMU | |
323 | membar #Sync | |
324 | fpdis_exit2: | |
325 | wr %g7, 0, %gsr | |
326 | ldx [%g6 + TI_XFSR], %fsr | |
327 | rdpr %tstate, %g3 | |
328 | or %g3, %g4, %g3 ! anal... | |
329 | wrpr %g3, %tstate | |
330 | wr %g0, FPRS_FEF, %fprs ! clean DU/DL bits | |
331 | retry | |
332 | ||
333 | .align 32 | |
334 | fp_other_bounce: | |
335 | call do_fpother | |
336 | add %sp, PTREGS_OFF, %o0 | |
337 | ba,pt %xcc, rtrap | |
338 | clr %l6 | |
339 | ||
340 | .globl do_fpother_check_fitos | |
341 | .align 32 | |
342 | do_fpother_check_fitos: | |
343 | sethi %hi(fp_other_bounce - 4), %g7 | |
344 | or %g7, %lo(fp_other_bounce - 4), %g7 | |
345 | ||
346 | /* NOTE: Need to preserve %g7 until we fully commit | |
347 | * to the fitos fixup. | |
348 | */ | |
349 | stx %fsr, [%g6 + TI_XFSR] | |
350 | rdpr %tstate, %g3 | |
351 | andcc %g3, TSTATE_PRIV, %g0 | |
352 | bne,pn %xcc, do_fptrap_after_fsr | |
353 | nop | |
354 | ldx [%g6 + TI_XFSR], %g3 | |
355 | srlx %g3, 14, %g1 | |
356 | and %g1, 7, %g1 | |
357 | cmp %g1, 2 ! Unfinished FP-OP | |
358 | bne,pn %xcc, do_fptrap_after_fsr | |
359 | sethi %hi(1 << 23), %g1 ! Inexact | |
360 | andcc %g3, %g1, %g0 | |
361 | bne,pn %xcc, do_fptrap_after_fsr | |
362 | rdpr %tpc, %g1 | |
363 | lduwa [%g1] ASI_AIUP, %g3 ! This cannot ever fail | |
364 | #define FITOS_MASK 0xc1f83fe0 | |
365 | #define FITOS_COMPARE 0x81a01880 | |
366 | sethi %hi(FITOS_MASK), %g1 | |
367 | or %g1, %lo(FITOS_MASK), %g1 | |
368 | and %g3, %g1, %g1 | |
369 | sethi %hi(FITOS_COMPARE), %g2 | |
370 | or %g2, %lo(FITOS_COMPARE), %g2 | |
371 | cmp %g1, %g2 | |
372 | bne,pn %xcc, do_fptrap_after_fsr | |
373 | nop | |
374 | std %f62, [%g6 + TI_FPREGS + (62 * 4)] | |
375 | sethi %hi(fitos_table_1), %g1 | |
376 | and %g3, 0x1f, %g2 | |
377 | or %g1, %lo(fitos_table_1), %g1 | |
378 | sllx %g2, 2, %g2 | |
379 | jmpl %g1 + %g2, %g0 | |
380 | ba,pt %xcc, fitos_emul_continue | |
381 | ||
382 | fitos_table_1: | |
383 | fitod %f0, %f62 | |
384 | fitod %f1, %f62 | |
385 | fitod %f2, %f62 | |
386 | fitod %f3, %f62 | |
387 | fitod %f4, %f62 | |
388 | fitod %f5, %f62 | |
389 | fitod %f6, %f62 | |
390 | fitod %f7, %f62 | |
391 | fitod %f8, %f62 | |
392 | fitod %f9, %f62 | |
393 | fitod %f10, %f62 | |
394 | fitod %f11, %f62 | |
395 | fitod %f12, %f62 | |
396 | fitod %f13, %f62 | |
397 | fitod %f14, %f62 | |
398 | fitod %f15, %f62 | |
399 | fitod %f16, %f62 | |
400 | fitod %f17, %f62 | |
401 | fitod %f18, %f62 | |
402 | fitod %f19, %f62 | |
403 | fitod %f20, %f62 | |
404 | fitod %f21, %f62 | |
405 | fitod %f22, %f62 | |
406 | fitod %f23, %f62 | |
407 | fitod %f24, %f62 | |
408 | fitod %f25, %f62 | |
409 | fitod %f26, %f62 | |
410 | fitod %f27, %f62 | |
411 | fitod %f28, %f62 | |
412 | fitod %f29, %f62 | |
413 | fitod %f30, %f62 | |
414 | fitod %f31, %f62 | |
415 | ||
416 | fitos_emul_continue: | |
417 | sethi %hi(fitos_table_2), %g1 | |
418 | srl %g3, 25, %g2 | |
419 | or %g1, %lo(fitos_table_2), %g1 | |
420 | and %g2, 0x1f, %g2 | |
421 | sllx %g2, 2, %g2 | |
422 | jmpl %g1 + %g2, %g0 | |
423 | ba,pt %xcc, fitos_emul_fini | |
424 | ||
425 | fitos_table_2: | |
426 | fdtos %f62, %f0 | |
427 | fdtos %f62, %f1 | |
428 | fdtos %f62, %f2 | |
429 | fdtos %f62, %f3 | |
430 | fdtos %f62, %f4 | |
431 | fdtos %f62, %f5 | |
432 | fdtos %f62, %f6 | |
433 | fdtos %f62, %f7 | |
434 | fdtos %f62, %f8 | |
435 | fdtos %f62, %f9 | |
436 | fdtos %f62, %f10 | |
437 | fdtos %f62, %f11 | |
438 | fdtos %f62, %f12 | |
439 | fdtos %f62, %f13 | |
440 | fdtos %f62, %f14 | |
441 | fdtos %f62, %f15 | |
442 | fdtos %f62, %f16 | |
443 | fdtos %f62, %f17 | |
444 | fdtos %f62, %f18 | |
445 | fdtos %f62, %f19 | |
446 | fdtos %f62, %f20 | |
447 | fdtos %f62, %f21 | |
448 | fdtos %f62, %f22 | |
449 | fdtos %f62, %f23 | |
450 | fdtos %f62, %f24 | |
451 | fdtos %f62, %f25 | |
452 | fdtos %f62, %f26 | |
453 | fdtos %f62, %f27 | |
454 | fdtos %f62, %f28 | |
455 | fdtos %f62, %f29 | |
456 | fdtos %f62, %f30 | |
457 | fdtos %f62, %f31 | |
458 | ||
459 | fitos_emul_fini: | |
460 | ldd [%g6 + TI_FPREGS + (62 * 4)], %f62 | |
461 | done | |
462 | ||
463 | .globl do_fptrap | |
464 | .align 32 | |
465 | do_fptrap: | |
466 | stx %fsr, [%g6 + TI_XFSR] | |
467 | do_fptrap_after_fsr: | |
468 | ldub [%g6 + TI_FPSAVED], %g3 | |
469 | rd %fprs, %g1 | |
470 | or %g3, %g1, %g3 | |
471 | stb %g3, [%g6 + TI_FPSAVED] | |
472 | rd %gsr, %g3 | |
473 | stx %g3, [%g6 + TI_GSR] | |
474 | mov SECONDARY_CONTEXT, %g3 | |
475 | ldxa [%g3] ASI_DMMU, %g5 | |
476 | cplus_fptrap_insn_4: | |
477 | sethi %hi(0), %g2 | |
478 | stxa %g2, [%g3] ASI_DMMU | |
479 | membar #Sync | |
480 | add %g6, TI_FPREGS, %g2 | |
481 | andcc %g1, FPRS_DL, %g0 | |
482 | be,pn %icc, 4f | |
483 | mov 0x40, %g3 | |
484 | stda %f0, [%g2] ASI_BLK_S | |
485 | stda %f16, [%g2 + %g3] ASI_BLK_S | |
486 | andcc %g1, FPRS_DU, %g0 | |
487 | be,pn %icc, 5f | |
488 | 4: add %g2, 128, %g2 | |
489 | stda %f32, [%g2] ASI_BLK_S | |
490 | stda %f48, [%g2 + %g3] ASI_BLK_S | |
491 | 5: mov SECONDARY_CONTEXT, %g1 | |
492 | membar #Sync | |
493 | stxa %g5, [%g1] ASI_DMMU | |
494 | membar #Sync | |
495 | ba,pt %xcc, etrap | |
496 | wr %g0, 0, %fprs | |
497 | ||
498 | cplus_fptrap_1: | |
499 | sethi %hi(CTX_CHEETAH_PLUS_CTX0), %g2 | |
500 | ||
501 | .globl cheetah_plus_patch_fpdis | |
502 | cheetah_plus_patch_fpdis: | |
503 | /* We configure the dTLB512_0 for 4MB pages and the | |
504 | * dTLB512_1 for 8K pages when in context zero. | |
505 | */ | |
506 | sethi %hi(cplus_fptrap_1), %o0 | |
507 | lduw [%o0 + %lo(cplus_fptrap_1)], %o1 | |
508 | ||
509 | set cplus_fptrap_insn_1, %o2 | |
510 | stw %o1, [%o2] | |
511 | flush %o2 | |
512 | set cplus_fptrap_insn_2, %o2 | |
513 | stw %o1, [%o2] | |
514 | flush %o2 | |
515 | set cplus_fptrap_insn_3, %o2 | |
516 | stw %o1, [%o2] | |
517 | flush %o2 | |
518 | set cplus_fptrap_insn_4, %o2 | |
519 | stw %o1, [%o2] | |
520 | flush %o2 | |
521 | ||
522 | retl | |
523 | nop | |
524 | ||
525 | /* The registers for cross calls will be: | |
526 | * | |
527 | * DATA 0: [low 32-bits] Address of function to call, jmp to this | |
528 | * [high 32-bits] MMU Context Argument 0, place in %g5 | |
529 | * DATA 1: Address Argument 1, place in %g6 | |
530 | * DATA 2: Address Argument 2, place in %g7 | |
531 | * | |
532 | * With this method we can do most of the cross-call tlb/cache | |
533 | * flushing very quickly. | |
534 | * | |
535 | * Current CPU's IRQ worklist table is locked into %g1, | |
536 | * don't touch. | |
537 | */ | |
538 | .text | |
539 | .align 32 | |
540 | .globl do_ivec | |
541 | do_ivec: | |
542 | mov 0x40, %g3 | |
543 | ldxa [%g3 + %g0] ASI_INTR_R, %g3 | |
544 | sethi %hi(KERNBASE), %g4 | |
545 | cmp %g3, %g4 | |
546 | bgeu,pn %xcc, do_ivec_xcall | |
547 | srlx %g3, 32, %g5 | |
548 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
549 | membar #Sync | |
550 | ||
551 | sethi %hi(ivector_table), %g2 | |
552 | sllx %g3, 5, %g3 | |
553 | or %g2, %lo(ivector_table), %g2 | |
554 | add %g2, %g3, %g3 | |
1da177e4 | 555 | ldub [%g3 + 0x04], %g4 /* pil */ |
088dd1f8 | 556 | mov 1, %g2 |
1da177e4 LT |
557 | sllx %g2, %g4, %g2 |
558 | sllx %g4, 2, %g4 | |
088dd1f8 | 559 | |
1da177e4 LT |
560 | lduw [%g6 + %g4], %g5 /* g5 = irq_work(cpu, pil) */ |
561 | stw %g5, [%g3 + 0x00] /* bucket->irq_chain = g5 */ | |
562 | stw %g3, [%g6 + %g4] /* irq_work(cpu, pil) = bucket */ | |
563 | wr %g2, 0x0, %set_softint | |
564 | retry | |
565 | do_ivec_xcall: | |
566 | mov 0x50, %g1 | |
1da177e4 LT |
567 | ldxa [%g1 + %g0] ASI_INTR_R, %g1 |
568 | srl %g3, 0, %g3 | |
088dd1f8 | 569 | |
1da177e4 LT |
570 | mov 0x60, %g7 |
571 | ldxa [%g7 + %g0] ASI_INTR_R, %g7 | |
572 | stxa %g0, [%g0] ASI_INTR_RECEIVE | |
573 | membar #Sync | |
574 | ba,pt %xcc, 1f | |
575 | nop | |
576 | ||
577 | .align 32 | |
578 | 1: jmpl %g3, %g0 | |
579 | nop | |
580 | ||
1da177e4 LT |
581 | .globl save_alternate_globals |
582 | save_alternate_globals: /* %o0 = save_area */ | |
583 | rdpr %pstate, %o5 | |
584 | andn %o5, PSTATE_IE, %o1 | |
585 | wrpr %o1, PSTATE_AG, %pstate | |
586 | stx %g0, [%o0 + 0x00] | |
587 | stx %g1, [%o0 + 0x08] | |
588 | stx %g2, [%o0 + 0x10] | |
589 | stx %g3, [%o0 + 0x18] | |
590 | stx %g4, [%o0 + 0x20] | |
591 | stx %g5, [%o0 + 0x28] | |
592 | stx %g6, [%o0 + 0x30] | |
593 | stx %g7, [%o0 + 0x38] | |
594 | wrpr %o1, PSTATE_IG, %pstate | |
595 | stx %g0, [%o0 + 0x40] | |
596 | stx %g1, [%o0 + 0x48] | |
597 | stx %g2, [%o0 + 0x50] | |
598 | stx %g3, [%o0 + 0x58] | |
599 | stx %g4, [%o0 + 0x60] | |
600 | stx %g5, [%o0 + 0x68] | |
601 | stx %g6, [%o0 + 0x70] | |
602 | stx %g7, [%o0 + 0x78] | |
603 | wrpr %o1, PSTATE_MG, %pstate | |
604 | stx %g0, [%o0 + 0x80] | |
605 | stx %g1, [%o0 + 0x88] | |
606 | stx %g2, [%o0 + 0x90] | |
607 | stx %g3, [%o0 + 0x98] | |
608 | stx %g4, [%o0 + 0xa0] | |
609 | stx %g5, [%o0 + 0xa8] | |
610 | stx %g6, [%o0 + 0xb0] | |
611 | stx %g7, [%o0 + 0xb8] | |
612 | wrpr %o5, 0x0, %pstate | |
613 | retl | |
614 | nop | |
615 | ||
616 | .globl restore_alternate_globals | |
617 | restore_alternate_globals: /* %o0 = save_area */ | |
618 | rdpr %pstate, %o5 | |
619 | andn %o5, PSTATE_IE, %o1 | |
620 | wrpr %o1, PSTATE_AG, %pstate | |
621 | ldx [%o0 + 0x00], %g0 | |
622 | ldx [%o0 + 0x08], %g1 | |
623 | ldx [%o0 + 0x10], %g2 | |
624 | ldx [%o0 + 0x18], %g3 | |
625 | ldx [%o0 + 0x20], %g4 | |
626 | ldx [%o0 + 0x28], %g5 | |
627 | ldx [%o0 + 0x30], %g6 | |
628 | ldx [%o0 + 0x38], %g7 | |
629 | wrpr %o1, PSTATE_IG, %pstate | |
630 | ldx [%o0 + 0x40], %g0 | |
631 | ldx [%o0 + 0x48], %g1 | |
632 | ldx [%o0 + 0x50], %g2 | |
633 | ldx [%o0 + 0x58], %g3 | |
634 | ldx [%o0 + 0x60], %g4 | |
635 | ldx [%o0 + 0x68], %g5 | |
636 | ldx [%o0 + 0x70], %g6 | |
637 | ldx [%o0 + 0x78], %g7 | |
638 | wrpr %o1, PSTATE_MG, %pstate | |
639 | ldx [%o0 + 0x80], %g0 | |
640 | ldx [%o0 + 0x88], %g1 | |
641 | ldx [%o0 + 0x90], %g2 | |
642 | ldx [%o0 + 0x98], %g3 | |
643 | ldx [%o0 + 0xa0], %g4 | |
644 | ldx [%o0 + 0xa8], %g5 | |
645 | ldx [%o0 + 0xb0], %g6 | |
646 | ldx [%o0 + 0xb8], %g7 | |
647 | wrpr %o5, 0x0, %pstate | |
648 | retl | |
649 | nop | |
650 | ||
651 | .globl getcc, setcc | |
652 | getcc: | |
653 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
654 | srlx %o1, 32, %o1 | |
655 | and %o1, 0xf, %o1 | |
656 | retl | |
657 | stx %o1, [%o0 + PT_V9_G1] | |
658 | setcc: | |
659 | ldx [%o0 + PT_V9_TSTATE], %o1 | |
660 | ldx [%o0 + PT_V9_G1], %o2 | |
661 | or %g0, %ulo(TSTATE_ICC), %o3 | |
662 | sllx %o3, 32, %o3 | |
663 | andn %o1, %o3, %o1 | |
664 | sllx %o2, 32, %o2 | |
665 | and %o2, %o3, %o2 | |
666 | or %o1, %o2, %o1 | |
667 | retl | |
668 | stx %o1, [%o0 + PT_V9_TSTATE] | |
669 | ||
670 | .globl utrap, utrap_ill | |
671 | utrap: brz,pn %g1, etrap | |
672 | nop | |
673 | save %sp, -128, %sp | |
674 | rdpr %tstate, %l6 | |
675 | rdpr %cwp, %l7 | |
676 | andn %l6, TSTATE_CWP, %l6 | |
677 | wrpr %l6, %l7, %tstate | |
678 | rdpr %tpc, %l6 | |
679 | rdpr %tnpc, %l7 | |
680 | wrpr %g1, 0, %tnpc | |
681 | done | |
682 | utrap_ill: | |
683 | call bad_trap | |
684 | add %sp, PTREGS_OFF, %o0 | |
685 | ba,pt %xcc, rtrap | |
686 | clr %l6 | |
687 | ||
1da177e4 LT |
688 | /* XXX Here is stuff we still need to write... -DaveM XXX */ |
689 | .globl netbsd_syscall | |
690 | netbsd_syscall: | |
691 | retl | |
692 | nop | |
693 | ||
6c52a96e DM |
694 | /* We need to carefully read the error status, ACK |
695 | * the errors, prevent recursive traps, and pass the | |
696 | * information on to C code for logging. | |
697 | * | |
698 | * We pass the AFAR in as-is, and we encode the status | |
699 | * information as described in asm-sparc64/sfafsr.h | |
700 | */ | |
701 | .globl __spitfire_access_error | |
702 | __spitfire_access_error: | |
703 | /* Disable ESTATE error reporting so that we do not | |
704 | * take recursive traps and RED state the processor. | |
705 | */ | |
706 | stxa %g0, [%g0] ASI_ESTATE_ERROR_EN | |
707 | membar #Sync | |
708 | ||
709 | mov UDBE_UE, %g1 | |
710 | ldxa [%g0] ASI_AFSR, %g4 ! Get AFSR | |
711 | ||
712 | /* __spitfire_cee_trap branches here with AFSR in %g4 and | |
713 | * UDBE_CE in %g1. It only clears ESTATE_ERR_CE in the | |
714 | * ESTATE Error Enable register. | |
715 | */ | |
716 | __spitfire_cee_trap_continue: | |
717 | ldxa [%g0] ASI_AFAR, %g5 ! Get AFAR | |
718 | ||
719 | rdpr %tt, %g3 | |
720 | and %g3, 0x1ff, %g3 ! Paranoia | |
721 | sllx %g3, SFSTAT_TRAP_TYPE_SHIFT, %g3 | |
722 | or %g4, %g3, %g4 | |
723 | rdpr %tl, %g3 | |
724 | cmp %g3, 1 | |
725 | mov 1, %g3 | |
726 | bleu %xcc, 1f | |
727 | sllx %g3, SFSTAT_TL_GT_ONE_SHIFT, %g3 | |
728 | ||
729 | or %g4, %g3, %g4 | |
730 | ||
731 | /* Read in the UDB error register state, clearing the | |
732 | * sticky error bits as-needed. We only clear them if | |
733 | * the UE bit is set. Likewise, __spitfire_cee_trap | |
734 | * below will only do so if the CE bit is set. | |
735 | * | |
736 | * NOTE: UltraSparc-I/II have high and low UDB error | |
737 | * registers, corresponding to the two UDB units | |
738 | * present on those chips. UltraSparc-IIi only | |
739 | * has a single UDB, called "SDB" in the manual. | |
740 | * For IIi the upper UDB register always reads | |
741 | * as zero so for our purposes things will just | |
742 | * work with the checks below. | |
743 | */ | |
744 | 1: ldxa [%g0] ASI_UDBH_ERROR_R, %g3 | |
745 | and %g3, 0x3ff, %g7 ! Paranoia | |
746 | sllx %g7, SFSTAT_UDBH_SHIFT, %g7 | |
747 | or %g4, %g7, %g4 | |
748 | andcc %g3, %g1, %g3 ! UDBE_UE or UDBE_CE | |
749 | be,pn %xcc, 1f | |
750 | nop | |
751 | stxa %g3, [%g0] ASI_UDB_ERROR_W | |
752 | membar #Sync | |
753 | ||
754 | 1: mov 0x18, %g3 | |
755 | ldxa [%g3] ASI_UDBL_ERROR_R, %g3 | |
756 | and %g3, 0x3ff, %g7 ! Paranoia | |
757 | sllx %g7, SFSTAT_UDBL_SHIFT, %g7 | |
758 | or %g4, %g7, %g4 | |
759 | andcc %g3, %g1, %g3 ! UDBE_UE or UDBE_CE | |
760 | be,pn %xcc, 1f | |
761 | nop | |
762 | mov 0x18, %g7 | |
763 | stxa %g3, [%g7] ASI_UDB_ERROR_W | |
764 | membar #Sync | |
765 | ||
766 | 1: /* Ok, now that we've latched the error state, | |
767 | * clear the sticky bits in the AFSR. | |
768 | */ | |
769 | stxa %g4, [%g0] ASI_AFSR | |
770 | membar #Sync | |
771 | ||
772 | rdpr %tl, %g2 | |
773 | cmp %g2, 1 | |
774 | rdpr %pil, %g2 | |
775 | bleu,pt %xcc, 1f | |
776 | wrpr %g0, 15, %pil | |
777 | ||
778 | ba,pt %xcc, etraptl1 | |
779 | rd %pc, %g7 | |
780 | ||
781 | ba,pt %xcc, 2f | |
782 | nop | |
783 | ||
784 | 1: ba,pt %xcc, etrap_irq | |
785 | rd %pc, %g7 | |
786 | ||
787 | 2: mov %l4, %o1 | |
788 | mov %l5, %o2 | |
789 | call spitfire_access_error | |
790 | add %sp, PTREGS_OFF, %o0 | |
791 | ba,pt %xcc, rtrap | |
792 | clr %l6 | |
793 | ||
794 | /* This is the trap handler entry point for ECC correctable | |
795 | * errors. They are corrected, but we listen for the trap | |
796 | * so that the event can be logged. | |
797 | * | |
798 | * Disrupting errors are either: | |
799 | * 1) single-bit ECC errors during UDB reads to system | |
800 | * memory | |
801 | * 2) data parity errors during write-back events | |
802 | * | |
803 | * As far as I can make out from the manual, the CEE trap | |
804 | * is only for correctable errors during memory read | |
805 | * accesses by the front-end of the processor. | |
806 | * | |
807 | * The code below is only for trap level 1 CEE events, | |
808 | * as it is the only situation where we can safely record | |
809 | * and log. For trap level >1 we just clear the CE bit | |
810 | * in the AFSR and return. | |
811 | * | |
812 | * This is just like __spitfire_access_error above, but it | |
813 | * specifically handles correctable errors. If an | |
814 | * uncorrectable error is indicated in the AFSR we | |
815 | * will branch directly above to __spitfire_access_error | |
816 | * to handle it instead. Uncorrectable therefore takes | |
817 | * priority over correctable, and the error logging | |
818 | * C code will notice this case by inspecting the | |
819 | * trap type. | |
820 | */ | |
821 | .globl __spitfire_cee_trap | |
822 | __spitfire_cee_trap: | |
823 | ldxa [%g0] ASI_AFSR, %g4 ! Get AFSR | |
824 | mov 1, %g3 | |
825 | sllx %g3, SFAFSR_UE_SHIFT, %g3 | |
826 | andcc %g4, %g3, %g0 ! Check for UE | |
827 | bne,pn %xcc, __spitfire_access_error | |
828 | nop | |
829 | ||
830 | /* Ok, in this case we only have a correctable error. | |
831 | * Indicate we only wish to capture that state in register | |
832 | * %g1, and we only disable CE error reporting unlike UE | |
833 | * handling which disables all errors. | |
834 | */ | |
835 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g3 | |
836 | andn %g3, ESTATE_ERR_CE, %g3 | |
837 | stxa %g3, [%g0] ASI_ESTATE_ERROR_EN | |
838 | membar #Sync | |
839 | ||
840 | /* Preserve AFSR in %g4, indicate UDB state to capture in %g1 */ | |
841 | ba,pt %xcc, __spitfire_cee_trap_continue | |
842 | mov UDBE_CE, %g1 | |
843 | ||
844 | .globl __spitfire_data_access_exception | |
845 | .globl __spitfire_data_access_exception_tl1 | |
846 | __spitfire_data_access_exception_tl1: | |
1da177e4 LT |
847 | rdpr %pstate, %g4 |
848 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
849 | mov TLB_SFSR, %g3 | |
850 | mov DMMU_SFAR, %g5 | |
851 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
852 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
853 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
854 | membar #Sync | |
bde4e4ee DM |
855 | rdpr %tt, %g3 |
856 | cmp %g3, 0x80 ! first win spill/fill trap | |
857 | blu,pn %xcc, 1f | |
858 | cmp %g3, 0xff ! last win spill/fill trap | |
859 | bgu,pn %xcc, 1f | |
860 | nop | |
1da177e4 LT |
861 | ba,pt %xcc, winfix_dax |
862 | rdpr %tpc, %g3 | |
bde4e4ee DM |
863 | 1: sethi %hi(109f), %g7 |
864 | ba,pt %xcc, etraptl1 | |
865 | 109: or %g7, %lo(109b), %g7 | |
866 | mov %l4, %o1 | |
867 | mov %l5, %o2 | |
6c52a96e | 868 | call spitfire_data_access_exception_tl1 |
bde4e4ee DM |
869 | add %sp, PTREGS_OFF, %o0 |
870 | ba,pt %xcc, rtrap | |
871 | clr %l6 | |
872 | ||
6c52a96e | 873 | __spitfire_data_access_exception: |
1da177e4 LT |
874 | rdpr %pstate, %g4 |
875 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
876 | mov TLB_SFSR, %g3 | |
877 | mov DMMU_SFAR, %g5 | |
878 | ldxa [%g3] ASI_DMMU, %g4 ! Get SFSR | |
879 | ldxa [%g5] ASI_DMMU, %g5 ! Get SFAR | |
880 | stxa %g0, [%g3] ASI_DMMU ! Clear SFSR.FaultValid bit | |
881 | membar #Sync | |
882 | sethi %hi(109f), %g7 | |
883 | ba,pt %xcc, etrap | |
884 | 109: or %g7, %lo(109b), %g7 | |
885 | mov %l4, %o1 | |
886 | mov %l5, %o2 | |
6c52a96e | 887 | call spitfire_data_access_exception |
1da177e4 LT |
888 | add %sp, PTREGS_OFF, %o0 |
889 | ba,pt %xcc, rtrap | |
890 | clr %l6 | |
891 | ||
6c52a96e DM |
892 | .globl __spitfire_insn_access_exception |
893 | .globl __spitfire_insn_access_exception_tl1 | |
894 | __spitfire_insn_access_exception_tl1: | |
1da177e4 LT |
895 | rdpr %pstate, %g4 |
896 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
897 | mov TLB_SFSR, %g3 | |
5ea68e02 DM |
898 | ldxa [%g3] ASI_IMMU, %g4 ! Get SFSR |
899 | rdpr %tpc, %g5 ! IMMU has no SFAR, use TPC | |
1da177e4 LT |
900 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit |
901 | membar #Sync | |
902 | sethi %hi(109f), %g7 | |
903 | ba,pt %xcc, etraptl1 | |
904 | 109: or %g7, %lo(109b), %g7 | |
905 | mov %l4, %o1 | |
906 | mov %l5, %o2 | |
6c52a96e | 907 | call spitfire_insn_access_exception_tl1 |
1da177e4 LT |
908 | add %sp, PTREGS_OFF, %o0 |
909 | ba,pt %xcc, rtrap | |
910 | clr %l6 | |
911 | ||
6c52a96e | 912 | __spitfire_insn_access_exception: |
1da177e4 LT |
913 | rdpr %pstate, %g4 |
914 | wrpr %g4, PSTATE_MG|PSTATE_AG, %pstate | |
915 | mov TLB_SFSR, %g3 | |
5ea68e02 DM |
916 | ldxa [%g3] ASI_IMMU, %g4 ! Get SFSR |
917 | rdpr %tpc, %g5 ! IMMU has no SFAR, use TPC | |
1da177e4 LT |
918 | stxa %g0, [%g3] ASI_IMMU ! Clear FaultValid bit |
919 | membar #Sync | |
920 | sethi %hi(109f), %g7 | |
921 | ba,pt %xcc, etrap | |
922 | 109: or %g7, %lo(109b), %g7 | |
923 | mov %l4, %o1 | |
924 | mov %l5, %o2 | |
6c52a96e | 925 | call spitfire_insn_access_exception |
1da177e4 LT |
926 | add %sp, PTREGS_OFF, %o0 |
927 | ba,pt %xcc, rtrap | |
928 | clr %l6 | |
929 | ||
1da177e4 LT |
! CHEETAH_LOG_ERROR is expanded inline by cheetah_fast_ecc/cheetah_cee/
! cheetah_deferred_trap below.  It snapshots AFSR/AFAR plus the D-cache,
! I-cache and E-cache lines covering the faulting address into this CPU's
! slot of cheetah_error_log (indexed by CPU id from Safari/JBUS config,
! then by trap-level bit).  Local label 80: is the shared bail-out for
! both "no log buffer yet" and "log slot already occupied" (entry != -1).
! Comments cannot be added inside the macro body without `\` continuations,
! so all notes live here.
930 | /* Capture I/D/E-cache state into per-cpu error scoreboard. |
931 | * |
932 | * %g1: (TL>=0) ? 1 : 0 |
933 | * %g2: scratch |
934 | * %g3: scratch |
935 | * %g4: AFSR |
936 | * %g5: AFAR |
937 | * %g6: current thread ptr |
938 | * %g7: scratch |
939 | */ |
940 | #define CHEETAH_LOG_ERROR \ |
941 | /* Put "TL1" software bit into AFSR. */ \ |
942 | and %g1, 0x1, %g1; \ |
943 | sllx %g1, 63, %g2; \ |
944 | or %g4, %g2, %g4; \ |
945 | /* Get log entry pointer for this cpu at this trap level. */ \ |
946 | BRANCH_IF_JALAPENO(g2,g3,50f) \ |
947 | ldxa [%g0] ASI_SAFARI_CONFIG, %g2; \ |
948 | srlx %g2, 17, %g2; \ |
949 | ba,pt %xcc, 60f; \ |
950 | and %g2, 0x3ff, %g2; \ |
951 | 50: ldxa [%g0] ASI_JBUS_CONFIG, %g2; \ |
952 | srlx %g2, 17, %g2; \ |
953 | and %g2, 0x1f, %g2; \ |
954 | 60: sllx %g2, 9, %g2; \ |
955 | sethi %hi(cheetah_error_log), %g3; \ |
956 | ldx [%g3 + %lo(cheetah_error_log)], %g3; \ |
957 | brz,pn %g3, 80f; \ |
958 | nop; \ |
959 | add %g3, %g2, %g3; \ |
960 | sllx %g1, 8, %g1; \ |
961 | add %g3, %g1, %g1; \ |
962 | /* %g1 holds pointer to the top of the logging scoreboard */ \ |
963 | ldx [%g1 + 0x0], %g7; \ |
964 | cmp %g7, -1; \ |
965 | bne,pn %xcc, 80f; \ |
966 | nop; \ |
967 | stx %g4, [%g1 + 0x0]; \ |
968 | stx %g5, [%g1 + 0x8]; \ |
969 | add %g1, 0x10, %g1; \ |
970 | /* %g1 now points to D-cache logging area */ \ |
971 | set 0x3ff8, %g2; /* DC_addr mask */ \ |
972 | and %g5, %g2, %g2; /* DC_addr bits of AFAR */ \ |
973 | srlx %g5, 12, %g3; \ |
974 | or %g3, 1, %g3; /* PHYS tag + valid */ \ |
975 | 10: ldxa [%g2] ASI_DCACHE_TAG, %g7; \ |
976 | cmp %g3, %g7; /* TAG match? */ \ |
977 | bne,pt %xcc, 13f; \ |
978 | nop; \ |
979 | /* Yep, what we want, capture state. */ \ |
980 | stx %g2, [%g1 + 0x20]; \ |
981 | stx %g7, [%g1 + 0x28]; \ |
982 | /* A membar Sync is required before and after utag access. */ \ |
983 | membar #Sync; \ |
984 | ldxa [%g2] ASI_DCACHE_UTAG, %g7; \ |
985 | membar #Sync; \ |
986 | stx %g7, [%g1 + 0x30]; \ |
987 | ldxa [%g2] ASI_DCACHE_SNOOP_TAG, %g7; \ |
988 | stx %g7, [%g1 + 0x38]; \ |
989 | clr %g3; \ |
990 | 12: ldxa [%g2 + %g3] ASI_DCACHE_DATA, %g7; \ |
991 | stx %g7, [%g1]; \ |
992 | add %g3, (1 << 5), %g3; \ |
993 | cmp %g3, (4 << 5); \ |
994 | bl,pt %xcc, 12b; \ |
995 | add %g1, 0x8, %g1; \ |
996 | ba,pt %xcc, 20f; \ |
997 | add %g1, 0x20, %g1; \ |
998 | 13: sethi %hi(1 << 14), %g7; \ |
999 | add %g2, %g7, %g2; \ |
1000 | srlx %g2, 14, %g7; \ |
1001 | cmp %g7, 4; \ |
1002 | bl,pt %xcc, 10b; \ |
1003 | nop; \ |
1004 | add %g1, 0x40, %g1; \ |
1005 | 20: /* %g1 now points to I-cache logging area */ \ |
1006 | set 0x1fe0, %g2; /* IC_addr mask */ \ |
1007 | and %g5, %g2, %g2; /* IC_addr bits of AFAR */ \ |
1008 | sllx %g2, 1, %g2; /* IC_addr[13:6]==VA[12:5] */ \ |
1009 | srlx %g5, (13 - 8), %g3; /* Make PTAG */ \ |
1010 | andn %g3, 0xff, %g3; /* Mask off undefined bits */ \ |
1011 | 21: ldxa [%g2] ASI_IC_TAG, %g7; \ |
1012 | andn %g7, 0xff, %g7; \ |
1013 | cmp %g3, %g7; \ |
1014 | bne,pt %xcc, 23f; \ |
1015 | nop; \ |
1016 | /* Yep, what we want, capture state. */ \ |
1017 | stx %g2, [%g1 + 0x40]; \ |
1018 | stx %g7, [%g1 + 0x48]; \ |
1019 | add %g2, (1 << 3), %g2; \ |
1020 | ldxa [%g2] ASI_IC_TAG, %g7; \ |
1021 | add %g2, (1 << 3), %g2; \ |
1022 | stx %g7, [%g1 + 0x50]; \ |
1023 | ldxa [%g2] ASI_IC_TAG, %g7; \ |
1024 | add %g2, (1 << 3), %g2; \ |
1025 | stx %g7, [%g1 + 0x60]; \ |
1026 | ldxa [%g2] ASI_IC_TAG, %g7; \ |
1027 | stx %g7, [%g1 + 0x68]; \ |
1028 | sub %g2, (3 << 3), %g2; \ |
1029 | ldxa [%g2] ASI_IC_STAG, %g7; \ |
1030 | stx %g7, [%g1 + 0x58]; \ |
1031 | clr %g3; \ |
1032 | srlx %g2, 2, %g2; \ |
1033 | 22: ldxa [%g2 + %g3] ASI_IC_INSTR, %g7; \ |
1034 | stx %g7, [%g1]; \ |
1035 | add %g3, (1 << 3), %g3; \ |
1036 | cmp %g3, (8 << 3); \ |
1037 | bl,pt %xcc, 22b; \ |
1038 | add %g1, 0x8, %g1; \ |
1039 | ba,pt %xcc, 30f; \ |
1040 | add %g1, 0x30, %g1; \ |
1041 | 23: sethi %hi(1 << 14), %g7; \ |
1042 | add %g2, %g7, %g2; \ |
1043 | srlx %g2, 14, %g7; \ |
1044 | cmp %g7, 4; \ |
1045 | bl,pt %xcc, 21b; \ |
1046 | nop; \ |
1047 | add %g1, 0x70, %g1; \ |
1048 | 30: /* %g1 now points to E-cache logging area */ \ |
1049 | andn %g5, (32 - 1), %g2; /* E-cache subblock */ \ |
1050 | stx %g2, [%g1 + 0x20]; \ |
1051 | ldxa [%g2] ASI_EC_TAG_DATA, %g7; \ |
1052 | stx %g7, [%g1 + 0x28]; \ |
1053 | ldxa [%g2] ASI_EC_R, %g0; \ |
1054 | clr %g3; \ |
1055 | 31: ldxa [%g3] ASI_EC_DATA, %g7; \ |
1056 | stx %g7, [%g1 + %g3]; \ |
1057 | add %g3, 0x8, %g3; \ |
1058 | cmp %g3, 0x20; \ |
1059 | bl,pt %xcc, 31b; \ |
1060 | nop; \ |
1061 | 80: /* DONE */ |
1062 | ||
! Each vector below: membar #Sync, disable caches in the DCU control
! register (fast-ECC and deferred kill both D and I; CEE turns off only
! the I-cache), membar #Sync again, then jump to the common C-entry
! trampoline with %g1 = 0 for the TL0 vector or 1 for the TL1 vector.
1063 | /* These get patched into the trap table at boot time |
1064 | * once we know we have a cheetah processor. |
1065 | */ |
1066 | .globl cheetah_fecc_trap_vector, cheetah_fecc_trap_vector_tl1 |
1067 | cheetah_fecc_trap_vector: |
1068 | membar #Sync |
1069 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 |
1070 | andn %g1, DCU_DC | DCU_IC, %g1 |
1071 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG |
1072 | membar #Sync |
1073 | sethi %hi(cheetah_fast_ecc), %g2 |
1074 | jmpl %g2 + %lo(cheetah_fast_ecc), %g0 |
1075 | mov 0, %g1 |
1076 | cheetah_fecc_trap_vector_tl1: |
1077 | membar #Sync |
1078 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 |
1079 | andn %g1, DCU_DC | DCU_IC, %g1 |
1080 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG |
1081 | membar #Sync |
1082 | sethi %hi(cheetah_fast_ecc), %g2 |
1083 | jmpl %g2 + %lo(cheetah_fast_ecc), %g0 |
1084 | mov 1, %g1 |
1085 | .globl cheetah_cee_trap_vector, cheetah_cee_trap_vector_tl1 |
1086 | cheetah_cee_trap_vector: |
1087 | membar #Sync |
1088 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 |
1089 | andn %g1, DCU_IC, %g1 |
1090 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG |
1091 | membar #Sync |
1092 | sethi %hi(cheetah_cee), %g2 |
1093 | jmpl %g2 + %lo(cheetah_cee), %g0 |
1094 | mov 0, %g1 |
1095 | cheetah_cee_trap_vector_tl1: |
1096 | membar #Sync |
1097 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 |
1098 | andn %g1, DCU_IC, %g1 |
1099 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG |
1100 | membar #Sync |
1101 | sethi %hi(cheetah_cee), %g2 |
1102 | jmpl %g2 + %lo(cheetah_cee), %g0 |
1103 | mov 1, %g1 |
1104 | .globl cheetah_deferred_trap_vector, cheetah_deferred_trap_vector_tl1 |
1105 | cheetah_deferred_trap_vector: |
1106 | membar #Sync |
1107 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1; |
1108 | andn %g1, DCU_DC | DCU_IC, %g1; |
1109 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG; |
1110 | membar #Sync; |
1111 | sethi %hi(cheetah_deferred_trap), %g2 |
1112 | jmpl %g2 + %lo(cheetah_deferred_trap), %g0 |
1113 | mov 0, %g1 |
1114 | cheetah_deferred_trap_vector_tl1: |
1115 | membar #Sync; |
1116 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1; |
1117 | andn %g1, DCU_DC | DCU_IC, %g1; |
1118 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG; |
1119 | membar #Sync; |
1120 | sethi %hi(cheetah_deferred_trap), %g2 |
1121 | jmpl %g2 + %lo(cheetah_deferred_trap), %g0 |
1122 | mov 1, %g1 |
1123 | ||
! Cheetah+ I/D-cache parity vectors.  The TL0 vectors jump to small
! etrap/rtrap stubs that call cheetah_plus_parity_error(code, regs) with
! code 0x0 (dcache) or 0x1 (icache).  The TL1 vectors switch %pstate to
! the interrupt globals and jump to do_dcpe_tl1 / do_icpe_tl1 below.
! The trailing nops pad each vector to its fixed trap-table slot size.
1124 | /* Cheetah+ specific traps. These are for the new I/D cache parity |
1125 | * error traps. The first argument to cheetah_plus_parity_handler |
1126 | * is encoded as follows: |
1127 | * |
1128 | * Bit0: 0=dcache,1=icache |
1129 | * Bit1: 0=recoverable,1=unrecoverable |
1130 | */ |
1131 | .globl cheetah_plus_dcpe_trap_vector, cheetah_plus_dcpe_trap_vector_tl1 |
1132 | cheetah_plus_dcpe_trap_vector: |
1133 | membar #Sync |
1134 | sethi %hi(do_cheetah_plus_data_parity), %g7 |
1135 | jmpl %g7 + %lo(do_cheetah_plus_data_parity), %g0 |
1136 | nop |
1137 | nop |
1138 | nop |
1139 | nop |
1140 | nop |
1141 | ||
1142 | do_cheetah_plus_data_parity: |
1143 | ba,pt %xcc, etrap |
1144 | rd %pc, %g7 |
1145 | mov 0x0, %o0 |
1146 | call cheetah_plus_parity_error |
1147 | add %sp, PTREGS_OFF, %o1 |
1148 | ba,pt %xcc, rtrap |
1149 | clr %l6 |
1150 | ||
1151 | cheetah_plus_dcpe_trap_vector_tl1: |
1152 | membar #Sync |
1153 | wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate |
1154 | sethi %hi(do_dcpe_tl1), %g3 |
1155 | jmpl %g3 + %lo(do_dcpe_tl1), %g0 |
1156 | nop |
1157 | nop |
1158 | nop |
1159 | nop |
1160 | ||
1161 | .globl cheetah_plus_icpe_trap_vector, cheetah_plus_icpe_trap_vector_tl1 |
1162 | cheetah_plus_icpe_trap_vector: |
1163 | membar #Sync |
1164 | sethi %hi(do_cheetah_plus_insn_parity), %g7 |
1165 | jmpl %g7 + %lo(do_cheetah_plus_insn_parity), %g0 |
1166 | nop |
1167 | nop |
1168 | nop |
1169 | nop |
1170 | nop |
1171 | ||
1172 | do_cheetah_plus_insn_parity: |
1173 | ba,pt %xcc, etrap |
1174 | rd %pc, %g7 |
1175 | mov 0x1, %o0 |
1176 | call cheetah_plus_parity_error |
1177 | add %sp, PTREGS_OFF, %o1 |
1178 | ba,pt %xcc, rtrap |
1179 | clr %l6 |
1180 | ||
1181 | cheetah_plus_icpe_trap_vector_tl1: |
1182 | membar #Sync |
1183 | wrpr PSTATE_IG | PSTATE_PEF | PSTATE_PRIV, %pstate |
1184 | sethi %hi(do_icpe_tl1), %g3 |
1185 | jmpl %g3 + %lo(do_icpe_tl1), %g0 |
1186 | nop |
1187 | nop |
1188 | nop |
1189 | nop |
1190 | ||
! TL>=1 D-cache parity recovery.  First walk every active trap level and
! check TSTATE_IG: if any outer level was already on the interrupt
! globals we cannot use them and the error is unrecoverable (code 0x2 to
! cheetah_plus_parity_error via etraptl1).  Otherwise rewrite every
! D-cache line's utag and zero its data words by hand, then fall into
! dcpe_icpe_tl1_common to flush/re-enable and retry.
1191 | /* If we take one of these traps when tl >= 1, then we |
1192 | * jump to interrupt globals. If some trap level above us |
1193 | * was also using interrupt globals, we cannot recover. |
1194 | * We may use all interrupt global registers except %g6. |
1195 | */ |
1196 | .globl do_dcpe_tl1, do_icpe_tl1 |
1197 | do_dcpe_tl1: |
1198 | rdpr %tl, %g1 ! Save original trap level |
1199 | mov 1, %g2 ! Setup TSTATE checking loop |
1200 | sethi %hi(TSTATE_IG), %g3 ! TSTATE mask bit |
1201 | 1: wrpr %g2, %tl ! Set trap level to check |
1202 | rdpr %tstate, %g4 ! Read TSTATE for this level |
1203 | andcc %g4, %g3, %g0 ! Interrupt globals in use? |
1204 | bne,a,pn %xcc, do_dcpe_tl1_fatal ! Yep, irrecoverable |
1205 | wrpr %g1, %tl ! Restore original trap level |
1206 | add %g2, 1, %g2 ! Next trap level |
1207 | cmp %g2, %g1 ! Hit them all yet? |
1208 | ble,pt %icc, 1b ! Not yet |
1209 | nop |
1210 | wrpr %g1, %tl ! Restore original trap level |
1211 | do_dcpe_tl1_nonfatal: /* Ok we may use interrupt globals safely. */ |
1212 | /* Reset D-cache parity */ |
1213 | sethi %hi(1 << 16), %g1 ! D-cache size |
1214 | mov (1 << 5), %g2 ! D-cache line size |
1215 | sub %g1, %g2, %g1 ! Move down 1 cacheline |
1216 | 1: srl %g1, 14, %g3 ! Compute UTAG |
1217 | membar #Sync |
1218 | stxa %g3, [%g1] ASI_DCACHE_UTAG |
1219 | membar #Sync |
1220 | sub %g2, 8, %g3 ! 64-bit data word within line |
1221 | 2: membar #Sync |
1222 | stxa %g0, [%g1 + %g3] ASI_DCACHE_DATA |
1223 | membar #Sync |
1224 | subcc %g3, 8, %g3 ! Next 64-bit data word |
1225 | bge,pt %icc, 2b |
1226 | nop |
1227 | subcc %g1, %g2, %g1 ! Next cacheline |
1228 | bge,pt %icc, 1b |
1229 | nop |
1230 | ba,pt %xcc, dcpe_icpe_tl1_common |
1231 | nop |
1232 | ||
1233 | do_dcpe_tl1_fatal: |
1234 | sethi %hi(1f), %g7 |
1235 | ba,pt %xcc, etraptl1 |
1236 | 1: or %g7, %lo(1b), %g7 |
1237 | mov 0x2, %o0 |
1238 | call cheetah_plus_parity_error |
1239 | add %sp, PTREGS_OFF, %o1 |
1240 | ba,pt %xcc, rtrap |
1241 | clr %l6 |
1242 | ||
! TL>=1 I-cache parity recovery: same interrupt-globals liveness scan as
! do_dcpe_tl1 (fatal path uses code 0x3), then invalidate the entire
! I-cache through ASI_IC_TAG and join dcpe_icpe_tl1_common.
1243 | do_icpe_tl1: |
1244 | rdpr %tl, %g1 ! Save original trap level |
1245 | mov 1, %g2 ! Setup TSTATE checking loop |
1246 | sethi %hi(TSTATE_IG), %g3 ! TSTATE mask bit |
1247 | 1: wrpr %g2, %tl ! Set trap level to check |
1248 | rdpr %tstate, %g4 ! Read TSTATE for this level |
1249 | andcc %g4, %g3, %g0 ! Interrupt globals in use? |
1250 | bne,a,pn %xcc, do_icpe_tl1_fatal ! Yep, irrecoverable |
1251 | wrpr %g1, %tl ! Restore original trap level |
1252 | add %g2, 1, %g2 ! Next trap level |
1253 | cmp %g2, %g1 ! Hit them all yet? |
1254 | ble,pt %icc, 1b ! Not yet |
1255 | nop |
1256 | wrpr %g1, %tl ! Restore original trap level |
1257 | do_icpe_tl1_nonfatal: /* Ok we may use interrupt globals safely. */ |
1258 | /* Flush I-cache */ |
1259 | sethi %hi(1 << 15), %g1 ! I-cache size |
1260 | mov (1 << 5), %g2 ! I-cache line size |
1261 | sub %g1, %g2, %g1 |
1262 | 1: or %g1, (2 << 3), %g3 |
1263 | stxa %g0, [%g3] ASI_IC_TAG |
1264 | membar #Sync |
1265 | subcc %g1, %g2, %g1 |
1266 | bge,pt %icc, 1b |
1267 | nop |
1268 | ba,pt %xcc, dcpe_icpe_tl1_common |
1269 | nop |
1270 | ||
1271 | do_icpe_tl1_fatal: |
1272 | sethi %hi(1f), %g7 |
1273 | ba,pt %xcc, etraptl1 |
1274 | 1: or %g7, %lo(1b), %g7 |
1275 | mov 0x3, %o0 |
1276 | call cheetah_plus_parity_error |
1277 | add %sp, PTREGS_OFF, %o1 |
1278 | ba,pt %xcc, rtrap |
1279 | clr %l6 |
1280 | ||
! Shared tail of the nonfatal TL1 parity paths: invalidate every D-cache
! tag, turn DCU_DC|DCU_IC back on in the DCU control register, and retry
! the instruction that took the parity trap.
1281 | dcpe_icpe_tl1_common: |
1282 | /* Flush D-cache, re-enable D/I caches in DCU and finally |
1283 | * retry the trapping instruction. |
1284 | */ |
1285 | sethi %hi(1 << 16), %g1 ! D-cache size |
1286 | mov (1 << 5), %g2 ! D-cache line size |
1287 | sub %g1, %g2, %g1 |
1288 | 1: stxa %g0, [%g1] ASI_DCACHE_TAG |
1289 | membar #Sync |
1290 | subcc %g1, %g2, %g1 |
1291 | bge,pt %icc, 1b |
1292 | nop |
1293 | ldxa [%g0] ASI_DCU_CONTROL_REG, %g1 |
1294 | or %g1, (DCU_DC | DCU_IC), %g1 |
1295 | stxa %g1, [%g0] ASI_DCU_CONTROL_REG |
1296 | membar #Sync |
1297 | retry |
1298 | ||
! After logging, the handler raises PIL to 15 and enters via etrap_irq,
! so cheetah_fecc_handler() runs with interrupts blocked.  NOTE(review):
! the %l4/%l5 args presumably carry the etrap-saved AFSR/AFAR from
! %g4/%g5 -- confirm against etrap_irq's register-save layout.
1299 | /* Cheetah FECC trap handling, we get here from tl{0,1}_fecc |
1300 | * in the trap table. That code has done a memory barrier |
1301 | * and has disabled both the I-cache and D-cache in the DCU |
1302 | * control register. The I-cache is disabled so that we may |
1303 | * capture the corrupted cache line, and the D-cache is disabled |
1304 | * because corrupt data may have been placed there and we don't |
1305 | * want to reference it. |
1306 | * |
1307 | * %g1 is one if this trap occurred at %tl >= 1. |
1308 | * |
1309 | * Next, we turn off error reporting so that we don't recurse. |
1310 | */ |
1311 | .globl cheetah_fast_ecc |
1312 | cheetah_fast_ecc: |
1313 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 |
1314 | andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2 |
1315 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN |
1316 | membar #Sync |
1317 | ||
1318 | /* Fetch and clear AFSR/AFAR */ |
1319 | ldxa [%g0] ASI_AFSR, %g4 |
1320 | ldxa [%g0] ASI_AFAR, %g5 |
1321 | stxa %g4, [%g0] ASI_AFSR |
1322 | membar #Sync |
1323 | ||
1324 | CHEETAH_LOG_ERROR |
1325 | ||
1326 | rdpr %pil, %g2 |
1327 | wrpr %g0, 15, %pil |
1328 | ba,pt %xcc, etrap_irq |
1329 | rd %pc, %g7 |
1330 | mov %l4, %o1 |
1331 | mov %l5, %o2 |
1332 | call cheetah_fecc_handler |
1333 | add %sp, PTREGS_OFF, %o0 |
1334 | ba,a,pt %xcc, rtrap_irq |
1335 | ||
! Correctable ECC error handler.  Note only CEEN is masked here (NCEEN
! stays set so uncorrectable errors still trap); otherwise this follows
! the same fetch-AFSR/AFAR, log, etrap_irq-at-PIL-15 shape as the
! fast-ECC path above, calling cheetah_cee_handler().
1336 | /* Our caller has disabled I-cache and performed membar Sync. */ |
1337 | .globl cheetah_cee |
1338 | cheetah_cee: |
1339 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 |
1340 | andn %g2, ESTATE_ERROR_CEEN, %g2 |
1341 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN |
1342 | membar #Sync |
1343 | ||
1344 | /* Fetch and clear AFSR/AFAR */ |
1345 | ldxa [%g0] ASI_AFSR, %g4 |
1346 | ldxa [%g0] ASI_AFAR, %g5 |
1347 | stxa %g4, [%g0] ASI_AFSR |
1348 | membar #Sync |
1349 | ||
1350 | CHEETAH_LOG_ERROR |
1351 | ||
1352 | rdpr %pil, %g2 |
1353 | wrpr %g0, 15, %pil |
1354 | ba,pt %xcc, etrap_irq |
1355 | rd %pc, %g7 |
1356 | mov %l4, %o1 |
1357 | mov %l5, %o2 |
1358 | call cheetah_cee_handler |
1359 | add %sp, PTREGS_OFF, %o0 |
1360 | ba,a,pt %xcc, rtrap_irq |
1361 | ||
! Deferred (asynchronous) error trap: masks both NCEEN and CEEN to stop
! recursion, captures and clears AFSR/AFAR, logs cache state, then calls
! cheetah_deferred_handler() via etrap_irq at PIL 15.
1362 | /* Our caller has disabled I-cache+D-cache and performed membar Sync. */ |
1363 | .globl cheetah_deferred_trap |
1364 | cheetah_deferred_trap: |
1365 | ldxa [%g0] ASI_ESTATE_ERROR_EN, %g2 |
1366 | andn %g2, ESTATE_ERROR_NCEEN | ESTATE_ERROR_CEEN, %g2 |
1367 | stxa %g2, [%g0] ASI_ESTATE_ERROR_EN |
1368 | membar #Sync |
1369 | ||
1370 | /* Fetch and clear AFSR/AFAR */ |
1371 | ldxa [%g0] ASI_AFSR, %g4 |
1372 | ldxa [%g0] ASI_AFAR, %g5 |
1373 | stxa %g4, [%g0] ASI_AFSR |
1374 | membar #Sync |
1375 | ||
1376 | CHEETAH_LOG_ERROR |
1377 | ||
1378 | rdpr %pil, %g2 |
1379 | wrpr %g0, 15, %pil |
1380 | ba,pt %xcc, etrap_irq |
1381 | rd %pc, %g7 |
1382 | mov %l4, %o1 |
1383 | mov %l5, %o2 |
1384 | call cheetah_deferred_handler |
1385 | add %sp, PTREGS_OFF, %o0 |
1386 | ba,a,pt %xcc, rtrap_irq |
1387 | ||
! Privileged-action trap: clear the DMMU SFSR FaultValid bit, then etrap
! into do_privact(regs) and return through rtrap.
1388 | .globl __do_privact |
1389 | __do_privact: |
1390 | mov TLB_SFSR, %g3 |
1391 | stxa %g0, [%g3] ASI_DMMU ! Clear FaultValid bit |
1392 | membar #Sync |
1393 | sethi %hi(109f), %g7 |
1394 | ba,pt %xcc, etrap |
1395 | 109: or %g7, %lo(109b), %g7 |
1396 | call do_privact |
1397 | add %sp, PTREGS_OFF, %o0 |
1398 | ba,pt %xcc, rtrap |
1399 | clr %l6 |
1400 | ||
! Misaligned memory access.  %tl is compared against 1 up front; the
! condition codes survive the SFAR/SFSR loads so the bgu below sends
! TL>1 faults to winfix_mna (which expects SFAR in %g4 and SFSR in %g5,
! set up here).  The TL<=1 case etraps into mem_address_unaligned().
1401 | .globl do_mna |
1402 | do_mna: |
1403 | rdpr %tl, %g3 |
1404 | cmp %g3, 1 |
1405 | ||
1406 | /* Setup %g4/%g5 now as they are used in the |
1407 | * winfixup code. |
1408 | */ |
1409 | mov TLB_SFSR, %g3 |
1410 | mov DMMU_SFAR, %g4 |
1411 | ldxa [%g4] ASI_DMMU, %g4 |
1412 | ldxa [%g3] ASI_DMMU, %g5 |
1413 | stxa %g0, [%g3] ASI_DMMU ! Clear FaultValid bit |
1414 | membar #Sync |
1415 | bgu,pn %icc, winfix_mna |
1416 | rdpr %tpc, %g3 |
1417 | ||
1418 | 1: sethi %hi(109f), %g7 |
1419 | ba,pt %xcc, etrap |
1420 | 109: or %g7, %lo(109b), %g7 |
1421 | mov %l4, %o1 |
1422 | mov %l5, %o2 |
1423 | call mem_address_unaligned |
1424 | add %sp, PTREGS_OFF, %o0 |
1425 | ba,pt %xcc, rtrap |
1426 | clr %l6 |
1427 | ||
! Misaligned LDDF (FP double load): read SFSR into %g5, clear FaultValid,
! read SFAR into %g4, then etrap into handle_lddfmna(regs, sfar, sfsr).
1428 | .globl do_lddfmna |
1429 | do_lddfmna: |
1430 | sethi %hi(109f), %g7 |
1431 | mov TLB_SFSR, %g4 |
1432 | ldxa [%g4] ASI_DMMU, %g5 |
1433 | stxa %g0, [%g4] ASI_DMMU ! Clear FaultValid bit |
1434 | membar #Sync |
1435 | mov DMMU_SFAR, %g4 |
1436 | ldxa [%g4] ASI_DMMU, %g4 |
1437 | ba,pt %xcc, etrap |
1438 | 109: or %g7, %lo(109b), %g7 |
1439 | mov %l4, %o1 |
1440 | mov %l5, %o2 |
1441 | call handle_lddfmna |
1442 | add %sp, PTREGS_OFF, %o0 |
1443 | ba,pt %xcc, rtrap |
1444 | clr %l6 |
1445 | ||
! Misaligned STDF (FP double store): identical shape to do_lddfmna above,
! dispatching to handle_stdfmna(regs, sfar, sfsr).
1446 | .globl do_stdfmna |
1447 | do_stdfmna: |
1448 | sethi %hi(109f), %g7 |
1449 | mov TLB_SFSR, %g4 |
1450 | ldxa [%g4] ASI_DMMU, %g5 |
1451 | stxa %g0, [%g4] ASI_DMMU ! Clear FaultValid bit |
1452 | membar #Sync |
1453 | mov DMMU_SFAR, %g4 |
1454 | ldxa [%g4] ASI_DMMU, %g4 |
1455 | ba,pt %xcc, etrap |
1456 | 109: or %g7, %lo(109b), %g7 |
1457 | mov %l4, %o1 |
1458 | mov %l5, %o2 |
1459 | call handle_stdfmna |
1460 | add %sp, PTREGS_OFF, %o0 |
1461 | ba,pt %xcc, rtrap |
1462 | clr %l6 |
1463 | ||
! Breakpoint trap: call sparc_breakpoint(regs) and rtrap out.
! NOTE(review): unlike its neighbors there is no etrap here, so this
! entry presumably assumes pt_regs are already set up at %sp+PTREGS_OFF
! by the trap-table path that branches in -- confirm.
1464 | .globl breakpoint_trap |
1465 | breakpoint_trap: |
1466 | call sparc_breakpoint |
1467 | add %sp, PTREGS_OFF, %o0 |
1468 | ba,pt %xcc, rtrap |
1469 | nop |
1470 | ||
! SunOS/Solaris compatibility entries.  sunos_indir dispatches syscall
! zero's "indirect syscall": range-check the requested number, load the
! handler from sunos_sys_table (or sunos_nosys), shift args down one
! slot, and tail-call it with the original return address restored in
! the delay slot.  The get{p,u,g}id pairs exploit the call delay slot:
! the first call's result is stored into pt_regs I1 (the second return
! value) by the delay slot of the second call, before %o0 is clobbered.
1471 | #if defined(CONFIG_SUNOS_EMUL) || defined(CONFIG_SOLARIS_EMUL) || \ |
1472 | defined(CONFIG_SOLARIS_EMUL_MODULE) |
1473 | /* SunOS uses syscall zero as the 'indirect syscall' it looks |
1474 | * like indir_syscall(scall_num, arg0, arg1, arg2...); etc. |
1475 | * This is complete brain damage. |
1476 | */ |
1477 | .globl sunos_indir |
1478 | sunos_indir: |
1479 | srl %o0, 0, %o0 |
1480 | mov %o7, %l4 |
1481 | cmp %o0, NR_SYSCALLS |
1482 | blu,a,pt %icc, 1f |
1483 | sll %o0, 0x2, %o0 |
1484 | sethi %hi(sunos_nosys), %l6 |
1485 | b,pt %xcc, 2f |
1486 | or %l6, %lo(sunos_nosys), %l6 |
1487 | 1: sethi %hi(sunos_sys_table), %l7 |
1488 | or %l7, %lo(sunos_sys_table), %l7 |
1489 | lduw [%l7 + %o0], %l6 |
1490 | 2: mov %o1, %o0 |
1491 | mov %o2, %o1 |
1492 | mov %o3, %o2 |
1493 | mov %o4, %o3 |
1494 | mov %o5, %o4 |
1495 | call %l6 |
1496 | mov %l4, %o7 |
1497 | ||
1498 | .globl sunos_getpid |
1499 | sunos_getpid: |
1500 | call sys_getppid |
1501 | nop |
1502 | call sys_getpid |
1503 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] |
1504 | b,pt %xcc, ret_sys_call |
1505 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] |
1506 | ||
1507 | /* SunOS getuid() returns uid in %o0 and euid in %o1 */ |
1508 | .globl sunos_getuid |
1509 | sunos_getuid: |
1510 | call sys32_geteuid16 |
1511 | nop |
1512 | call sys32_getuid16 |
1513 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] |
1514 | b,pt %xcc, ret_sys_call |
1515 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] |
1516 | ||
1517 | /* SunOS getgid() returns gid in %o0 and egid in %o1 */ |
1518 | .globl sunos_getgid |
1519 | sunos_getgid: |
1520 | call sys32_getegid16 |
1521 | nop |
1522 | call sys32_getgid16 |
1523 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I1] |
1524 | b,pt %xcc, ret_sys_call |
1525 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] |
1526 | #endif |
1527 | ||
! execve entry points: load the C implementation address into %g1
! (sparc_execve, or sparc32_execve for compat; sunos_execv additionally
! zeroes the envp slot I2), then execve_merge flushes register windows
! and tail-jumps with the pt_regs pointer in %o0.
! NOTE(review): the .globl directives appear swapped relative to the
! labels they precede (.globl sunos_execv before sys_execve:, and
! .globl sys_execve inside the CONFIG_COMPAT block).  .globl placement
! does not bind to the following label so this assembles, but confirm
! sys_execve is still exported as intended when CONFIG_COMPAT is off.
1528 | /* SunOS's execv() call only specifies the argv argument, the |
1529 | * environment settings are the same as the calling processes. |
1530 | */ |
1531 | .globl sunos_execv |
1532 | sys_execve: |
1533 | sethi %hi(sparc_execve), %g1 |
1534 | ba,pt %xcc, execve_merge |
1535 | or %g1, %lo(sparc_execve), %g1 |
1536 | #ifdef CONFIG_COMPAT |
1537 | .globl sys_execve |
1538 | sunos_execv: |
1539 | stx %g0, [%sp + PTREGS_OFF + PT_V9_I2] |
1540 | .globl sys32_execve |
1541 | sys32_execve: |
1542 | sethi %hi(sparc32_execve), %g1 |
1543 | or %g1, %lo(sparc32_execve), %g1 |
1544 | #endif |
1545 | execve_merge: |
1546 | flushw |
1547 | jmpl %g1, %g0 |
1548 | add %sp, PTREGS_OFF, %o0 |
1549 | ||
! Two-instruction syscall stubs: branch to the C implementation with the
! extra argument (pt_regs pointer, or the user frame pointer %i6 for the
! sigaltstack family) materialized in the branch delay slot.
1550 | .globl sys_pipe, sys_sigpause, sys_nis_syscall |
1551 | .globl sys_sigsuspend, sys_rt_sigsuspend |
1552 | .globl sys_rt_sigreturn |
1553 | .globl sys_ptrace |
1554 | .globl sys_sigaltstack |
1555 | .align 32 |
1556 | sys_pipe: ba,pt %xcc, sparc_pipe |
1557 | add %sp, PTREGS_OFF, %o0 |
1558 | sys_nis_syscall:ba,pt %xcc, c_sys_nis_syscall |
1559 | add %sp, PTREGS_OFF, %o0 |
1560 | sys_memory_ordering: |
1561 | ba,pt %xcc, sparc_memory_ordering |
1562 | add %sp, PTREGS_OFF, %o1 |
1563 | sys_sigaltstack:ba,pt %xcc, do_sigaltstack |
1564 | add %i6, STACK_BIAS, %o2 |
1565 | #ifdef CONFIG_COMPAT |
1566 | .globl sys32_sigstack |
1567 | sys32_sigstack: ba,pt %xcc, do_sys32_sigstack |
1568 | mov %i6, %o2 |
1569 | .globl sys32_sigaltstack |
1570 | sys32_sigaltstack: |
1571 | ba,pt %xcc, do_sys32_sigaltstack |
1572 | mov %i6, %o2 |
1573 | #endif |
! Signal-suspend / sigreturn / ptrace entries.  The "add %o7, 1f-.-4, %o7"
! idiom rewrites the return address so the called C routine returns to
! local label 1: below -- the shared exit that re-checks TI_FLAGS and
! calls syscall_trace(regs, 1) (syscall-exit report) before rtrap,
! instead of returning into the stub itself.
1574 | .align 32 |
1575 | sys_sigsuspend: add %sp, PTREGS_OFF, %o0 |
1576 | call do_sigsuspend |
1577 | add %o7, 1f-.-4, %o7 |
1578 | nop |
1579 | sys_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */ |
1580 | add %sp, PTREGS_OFF, %o2 |
1581 | call do_rt_sigsuspend |
1582 | add %o7, 1f-.-4, %o7 |
1583 | nop |
1584 | #ifdef CONFIG_COMPAT |
1585 | .globl sys32_rt_sigsuspend |
1586 | sys32_rt_sigsuspend: /* NOTE: %o0,%o1 have a correct value already */ |
1587 | srl %o0, 0, %o0 |
1588 | add %sp, PTREGS_OFF, %o2 |
1589 | call do_rt_sigsuspend32 |
1590 | add %o7, 1f-.-4, %o7 |
1591 | #endif |
1592 | /* NOTE: %o0 has a correct value already */ |
1593 | sys_sigpause: add %sp, PTREGS_OFF, %o1 |
1594 | call do_sigpause |
1595 | add %o7, 1f-.-4, %o7 |
1596 | nop |
1597 | #ifdef CONFIG_COMPAT |
1598 | .globl sys32_sigreturn |
1599 | sys32_sigreturn: |
1600 | add %sp, PTREGS_OFF, %o0 |
1601 | call do_sigreturn32 |
1602 | add %o7, 1f-.-4, %o7 |
1603 | nop |
1604 | #endif |
1605 | sys_rt_sigreturn: |
1606 | add %sp, PTREGS_OFF, %o0 |
1607 | call do_rt_sigreturn |
1608 | add %o7, 1f-.-4, %o7 |
1609 | nop |
1610 | #ifdef CONFIG_COMPAT |
1611 | .globl sys32_rt_sigreturn |
1612 | sys32_rt_sigreturn: |
1613 | add %sp, PTREGS_OFF, %o0 |
1614 | call do_rt_sigreturn32 |
1615 | add %o7, 1f-.-4, %o7 |
1616 | nop |
1617 | #endif |
1618 | sys_ptrace: add %sp, PTREGS_OFF, %o0 |
1619 | call do_ptrace |
1620 | add %o7, 1f-.-4, %o7 |
1621 | nop |
1622 | .align 32 |
1623 | 1: ldx [%curptr + TI_FLAGS], %l5 |
f7ceba36 | 1624 | andcc %l5, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0 |
1da177e4 LT |
1625 | be,pt %icc, rtrap |
1626 | clr %l6 |
8d8a6479 | 1627 | add %sp, PTREGS_OFF, %o0 |
1da177e4 | 1628 | call syscall_trace |
8d8a6479 | 1629 | mov 1, %o1 |
1da177e4 LT |
1630 | ||
1631 | ba,pt %xcc, rtrap |
1632 | clr %l6 |
1633 | ||
! fork/vfork/clone funnel into sparc_do_fork(flags, child_stack, regs, 0).
! movrz substitutes the parent's %fp when the caller passed a NULL child
! stack.  NOTE(review): the magic flags 0x4000 | 0x0100 for vfork look
! like CLONE_VFORK | CLONE_VM -- confirm against <linux/sched.h>; named
! constants would be clearer.
1634 | /* This is how fork() was meant to be done, 8 instruction entry. |
1635 | * |
1636 | * I questioned the following code briefly, let me clear things |
1637 | * up so you must not reason on it like I did. |
1638 | * |
1639 | * Know the fork_kpsr etc. we use in the sparc32 port? We don't |
1640 | * need it here because the only piece of window state we copy to |
1641 | * the child is the CWP register. Even if the parent sleeps, |
1642 | * we are safe because we stuck it into pt_regs of the parent |
1643 | * so it will not change. |
1644 | * |
1645 | * XXX This raises the question, whether we can do the same on |
1646 | * XXX sparc32 to get rid of fork_kpsr _and_ fork_kwim. The |
1647 | * XXX answer is yes. We stick fork_kpsr in UREG_G0 and |
1648 | * XXX fork_kwim in UREG_G1 (global registers are considered |
1649 | * XXX volatile across a system call in the sparc ABI I think |
1650 | * XXX if it isn't we can use regs->y instead, anyone who depends |
1651 | * XXX upon the Y register being preserved across a fork deserves |
1652 | * XXX to lose). |
1653 | * |
1654 | * In fact we should take advantage of that fact for other things |
1655 | * during system calls... |
1656 | */ |
1657 | .globl sys_fork, sys_vfork, sys_clone, sparc_exit |
1658 | .globl ret_from_syscall |
1659 | .align 32 |
1660 | sys_vfork: /* Under Linux, vfork and fork are just special cases of clone. */ |
1661 | sethi %hi(0x4000 | 0x0100 | SIGCHLD), %o0 |
1662 | or %o0, %lo(0x4000 | 0x0100 | SIGCHLD), %o0 |
1663 | ba,pt %xcc, sys_clone |
1664 | sys_fork: clr %o1 |
1665 | mov SIGCHLD, %o0 |
1666 | sys_clone: flushw |
1667 | movrz %o1, %fp, %o1 |
1668 | mov 0, %o3 |
1669 | ba,pt %xcc, sparc_do_fork |
1670 | add %sp, PTREGS_OFF, %o2 |
! Child's first code after fork: clear TI_NEW_CHILD, call
! schedule_tail(prev) with prev handed over in %g7, and if _TIF_PERFCTR
! is set restore %pcr (with the Blackbird %pic write/read errata dance),
! then rejoin ret_sys_call with the syscall return value reloaded from
! pt_regs I0.  sparc_exit: with interrupts disabled, fold %otherwin into
! %cansave and zero %otherwin/TI_WSAVED (discarding other-world windows)
! before tail-branching into sys_exit.
1671 | ret_from_syscall: |
db7d9a4e DM |
1672 | /* Clear current_thread_info()->new_child, and |
1673 | * check performance counter stuff too. |
1da177e4 | 1674 | */ |
db7d9a4e DM |
1675 | stb %g0, [%g6 + TI_NEW_CHILD] |
1676 | ldx [%g6 + TI_FLAGS], %l0 |
1da177e4 LT |
1677 | call schedule_tail |
1678 | mov %g7, %o0 |
1679 | andcc %l0, _TIF_PERFCTR, %g0 |
1680 | be,pt %icc, 1f |
1681 | nop |
1682 | ldx [%g6 + TI_PCR], %o7 |
1683 | wr %g0, %o7, %pcr |
1684 | ||
1685 | /* Blackbird errata workaround. See commentary in |
1686 | * smp.c:smp_percpu_timer_interrupt() for more |
1687 | * information. |
1688 | */ |
1689 | ba,pt %xcc, 99f |
1690 | nop |
1691 | .align 64 |
1692 | 99: wr %g0, %g0, %pic |
1693 | rd %pic, %g0 |
1694 | ||
1695 | 1: b,pt %xcc, ret_sys_call |
1696 | ldx [%sp + PTREGS_OFF + PT_V9_I0], %o0 |
1697 | sparc_exit: wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV), %pstate |
1698 | rdpr %otherwin, %g1 |
1699 | rdpr %cansave, %g3 |
1700 | add %g3, %g1, %g3 |
1701 | wrpr %g3, 0x0, %cansave |
1702 | wrpr %g0, 0x0, %otherwin |
1703 | wrpr %g0, (PSTATE_RMO | PSTATE_PEF | PSTATE_PRIV | PSTATE_IE), %pstate |
1704 | ba,pt %xcc, sys_exit |
1705 | stb %g0, [%g6 + TI_WSAVED] |
1706 | ||
! Slow paths for the syscall entries below.  linux_sparc_ni_syscall
! substitutes sys_ni_syscall and rejoins at 4:.  The trace variants call
! syscall_trace(regs, 0) (syscall-entry report), then rebuild the
! argument registers -- zero-extended with srl on the 32-bit/compat path,
! plain moves on the 64-bit path -- and rejoin the dispatch at 2:.
1707 | linux_sparc_ni_syscall: |
1708 | sethi %hi(sys_ni_syscall), %l7 |
1709 | b,pt %xcc, 4f |
1710 | or %l7, %lo(sys_ni_syscall), %l7 |
1711 | ||
1712 | linux_syscall_trace32: |
8d8a6479 | 1713 | add %sp, PTREGS_OFF, %o0 |
1da177e4 | 1714 | call syscall_trace |
8d8a6479 | 1715 | clr %o1 |
1da177e4 | 1716 | srl %i0, 0, %o0 |
8d8a6479 | 1717 | srl %i4, 0, %o4 |
1da177e4 LT |
1718 | srl %i1, 0, %o1 |
1719 | srl %i2, 0, %o2 |
1720 | b,pt %xcc, 2f |
1721 | srl %i3, 0, %o3 |
1722 | ||
1723 | linux_syscall_trace: |
8d8a6479 | 1724 | add %sp, PTREGS_OFF, %o0 |
1da177e4 | 1725 | call syscall_trace |
8d8a6479 | 1726 | clr %o1 |
1da177e4 LT |
1727 | mov %i0, %o0 |
1728 | mov %i1, %o1 |
1729 | mov %i2, %o2 |
1730 | mov %i3, %o3 |
1731 | b,pt %xcc, 2f |
1732 | mov %i4, %o4 |
1733 | ||
1734 | ||
! 32-bit/compat syscall dispatch: range-check the syscall number in %g1,
! load the handler from the table at %l7, zero-extend each user argument
! with srl, and detour to linux_syscall_trace32 if any of TRACE/SECCOMP/
! AUDIT flags are set in TI_FLAGS.  %i0 is stashed in %l5 (used by the
! restart logic in ret_sys_call's callers).  The "! IEU0/IEU1/Load"
! annotations are UltraSPARC pipeline-slot scheduling notes.
1735 | /* Linux 32-bit and SunOS system calls enter here... */ |
1736 | .align 32 |
1737 | .globl linux_sparc_syscall32 |
1738 | linux_sparc_syscall32: |
1739 | /* Direct access to user regs, much faster. */ |
1740 | cmp %g1, NR_SYSCALLS ! IEU1 Group |
1741 | bgeu,pn %xcc, linux_sparc_ni_syscall ! CTI |
1742 | srl %i0, 0, %o0 ! IEU0 |
1743 | sll %g1, 2, %l4 ! IEU0 Group |
1da177e4 LT |
1744 | srl %i4, 0, %o4 ! IEU1 |
1745 | lduw [%l7 + %l4], %l7 ! Load |
1746 | srl %i1, 0, %o1 ! IEU0 Group |
1747 | ldx [%curptr + TI_FLAGS], %l0 ! Load |
1748 | ||
1749 | srl %i5, 0, %o5 ! IEU1 |
1750 | srl %i2, 0, %o2 ! IEU0 Group |
f7ceba36 | 1751 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0 |
1da177e4 LT |
1752 | bne,pn %icc, linux_syscall_trace32 ! CTI |
1753 | mov %i0, %l5 ! IEU1 |
1754 | call %l7 ! CTI Group brk forced |
1755 | srl %i3, 0, %o3 ! IEU0 |
1756 | ba,a,pt %xcc, 3f |
1757 | ||
! 64-bit syscall dispatch plus the shared return path.  ret_sys_call:
! honors force_successful_syscall_return() via the TI_SYS_NOERROR byte
! (branch to 80: treats the value as success regardless of range),
! otherwise a return value >= -ERESTART_RESTARTBLOCK (unsigned compare)
! is an error: the carry bits (TSTATE_XCARRY|TSTATE_ICARRY) are set in
! the saved TSTATE and the errno is negated to its absolute value in I0.
! On success the carry bits are cleared.  Both paths advance TPC/TNPC
! past the trap instruction and detour through linux_syscall_trace2
! (syscall-exit report) when trace/seccomp/audit flags were latched in
! %l6 by the preceding andcc.
1758 | /* Linux native and SunOS system calls enter here... */ |
1759 | .align 32 |
1760 | .globl linux_sparc_syscall, ret_sys_call |
1761 | linux_sparc_syscall: |
1762 | /* Direct access to user regs, much faster. */ |
1763 | cmp %g1, NR_SYSCALLS ! IEU1 Group |
1764 | bgeu,pn %xcc, linux_sparc_ni_syscall ! CTI |
1765 | mov %i0, %o0 ! IEU0 |
1766 | sll %g1, 2, %l4 ! IEU0 Group |
1da177e4 LT |
1767 | mov %i1, %o1 ! IEU1 |
1768 | lduw [%l7 + %l4], %l7 ! Load |
1769 | 4: mov %i2, %o2 ! IEU0 Group |
1770 | ldx [%curptr + TI_FLAGS], %l0 ! Load |
1771 | ||
1772 | mov %i3, %o3 ! IEU1 |
1773 | mov %i4, %o4 ! IEU0 Group |
f7ceba36 | 1774 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %g0 |
1da177e4 LT |
1775 | bne,pn %icc, linux_syscall_trace ! CTI Group |
1776 | mov %i0, %l5 ! IEU0 |
1777 | 2: call %l7 ! CTI Group brk forced |
1778 | mov %i5, %o5 ! IEU0 |
1779 | nop |
1780 | ||
1781 | 3: stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] |
1782 | ret_sys_call: |
1da177e4 LT |
1783 | ldx [%sp + PTREGS_OFF + PT_V9_TSTATE], %g3 |
1784 | ldx [%sp + PTREGS_OFF + PT_V9_TNPC], %l1 ! pc = npc |
1785 | sra %o0, 0, %o0 |
1786 | mov %ulo(TSTATE_XCARRY | TSTATE_ICARRY), %g2 |
1787 | sllx %g2, 32, %g2 |
1788 | ||
1789 | /* Check if force_successful_syscall_return() |
1790 | * was invoked. |
1791 | */ |
db7d9a4e DM |
1792 | ldub [%curptr + TI_SYS_NOERROR], %l0 |
1793 | brz,pt %l0, 1f |
1794 | nop |
1da177e4 | 1795 | ba,pt %xcc, 80f |
db7d9a4e | 1796 | stb %g0, [%curptr + TI_SYS_NOERROR] |
1da177e4 LT |
1797 | ||
1798 | 1: |
1799 | cmp %o0, -ERESTART_RESTARTBLOCK |
1800 | bgeu,pn %xcc, 1f |
f7ceba36 | 1801 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %l6 |
1da177e4 LT |
1802 | 80: |
1803 | /* System call success, clear Carry condition code. */ |
1804 | andn %g3, %g2, %g3 |
1805 | stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE] |
1806 | bne,pn %icc, linux_syscall_trace2 |
1807 | add %l1, 0x4, %l2 ! npc = npc+4 |
1808 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] |
1809 | ba,pt %xcc, rtrap_clr_l6 |
1810 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] |
1811 | ||
1812 | 1: |
1813 | /* System call failure, set Carry condition code. |
1814 | * Also, get abs(errno) to return to the process. |
1815 | */ |
f7ceba36 | 1816 | andcc %l0, (_TIF_SYSCALL_TRACE|_TIF_SECCOMP|_TIF_SYSCALL_AUDIT), %l6 |
1da177e4 LT |
1817 | sub %g0, %o0, %o0 |
1818 | or %g3, %g2, %g3 |
1819 | stx %o0, [%sp + PTREGS_OFF + PT_V9_I0] |
1820 | mov 1, %l6 |
1821 | stx %g3, [%sp + PTREGS_OFF + PT_V9_TSTATE] |
1822 | bne,pn %icc, linux_syscall_trace2 |
1823 | add %l1, 0x4, %l2 ! npc = npc+4 |
1824 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] |
1825 | ||
1826 | b,pt %xcc, rtrap |
1827 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] |
1828 | linux_syscall_trace2: |
8d8a6479 | 1829 | add %sp, PTREGS_OFF, %o0 |
1da177e4 | 1830 | call syscall_trace |
8d8a6479 | 1831 | mov 1, %o1 |
1da177e4 LT |
1832 | stx %l1, [%sp + PTREGS_OFF + PT_V9_TPC] |
1833 | ba,pt %xcc, rtrap |
1834 | stx %l2, [%sp + PTREGS_OFF + PT_V9_TNPC] |
1835 | ||
! Flush the caller's user register windows to the stack: while %otherwin
! is non-zero, execute `save` (forcing window spills), counting the
! saves in %g2; then unwind with the same number of `restore`s so the
! routine returns in its original window.  Returns immediately if there
! are no other-world windows.
1836 | .align 32 |
1837 | .globl __flushw_user |
1838 | __flushw_user: |
1839 | rdpr %otherwin, %g1 |
1840 | brz,pn %g1, 2f |
1841 | clr %g2 |
1842 | 1: save %sp, -128, %sp |
1843 | rdpr %otherwin, %g1 |
1844 | brnz,pt %g1, 1b |
1845 | add %g2, 1, %g2 |
1846 | 1: sub %g2, 1, %g2 |
1847 | brnz,pt %g2, 1b |
1848 | restore %g0, %g0, %g0 |
1849 | 2: retl |
1850 | nop |