Commit | Line | Data |
---|---|---|
2a7e2990 DM |
1 | /* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling. |
2 | * | |
bf4a7972 | 3 | * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net> |
2a7e2990 DM |
4 | * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de) |
5 | * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx) | |
6 | * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | |
74bf4312 | 7 | */ |
2a7e2990 | 8 | |
2a7e2990 DM |
9 | #include <asm/head.h> |
10 | #include <asm/asi.h> | |
11 | #include <asm/page.h> | |
12 | #include <asm/pgtable.h> | |
74bf4312 | 13 | #include <asm/tsb.h> |
2a7e2990 DM |
14 | |
15 | .text | |
16 | .align 32 | |
17 | ||
kvmap_itlb:
	/* Kernel ITLB miss, TL1.  %g6: TAG TARGET.
	 * Read the missing virtual address from the I-MMU
	 * tag access register into %g4.
	 */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_IMMU, %g4

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:

kvmap_itlb_nonlinear:
	/* Catch kernel NULL pointer calls: any target inside the
	 * first page is bogus.
	 *
	 * FIX: this must branch to kvmap_itlb_longpath, not
	 * kvmap_dtlb_longpath.  The DTLB longpath reads the D-MMU
	 * tag access register and reports FAULT_CODE_DTLB, which is
	 * wrong for an instruction fetch; the ITLB longpath reports
	 * FAULT_CODE_ITLB with %tpc as the fault address, matching
	 * every other ITLB failure path in this file.
	 */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_itlb_longpath
	 nop

	/* Probe the kernel TSB; on a hit this branches to
	 * kvmap_itlb_load with the PTE in %g5, otherwise it falls
	 * through to the miss handling below with the TSB entry
	 * address left in %g1.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)

kvmap_itlb_tsb_miss:
	/* Classify the address: below LOW_OBP_ADDRESS it is a
	 * vmalloc/module text address; inside [LOW_OBP_ADDRESS, 4GB)
	 * it belongs to the OBP firmware translations.
	 */
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5			! %g5 = 4GB boundary
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_obp
	 nop
kvmap_itlb_vmalloc_addr:
	/* Walk the kernel page tables for a vmalloc-range text
	 * address in %g4; on failure branch to the longpath.  On
	 * success %g5 holds the physical address of the PTE
	 * (it is loaded via ASI_PHYS_USE_EC below).
	 */
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

	/* Lock the TSB entry at %g1 before modifying it
	 * (%g2, %g7 are scratch).
	 */
	KTSB_LOCK_TAG(%g1, %g2, %g7)

	/* Load and check PTE.  */
	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
	mov		1, %g7
	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
	/* A clear valid (sign) bit means the PTE is invalid; the
	 * annulled delay slot stores the invalid-tag pattern to
	 * unlock the TSB entry only when the branch is taken.
	 */
	brgez,a,pn	%g5, kvmap_itlb_longpath
	 KTSB_STORE(%g1, %g7)

	/* Commit tag (%g6) and PTE (%g5) into the TSB entry,
	 * which also releases the lock.
	 */
	KTSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_itlb_load:

661:	stxa		%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	/* Record this instruction pair so boot-time patching can
	 * replace it on sun4v (see comment below).
	 */
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are setup
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_itlb_load
	 mov		%g5, %g3
kvmap_itlb_longpath:
	/* Unresolvable ITLB miss: hand off to the C fault path.
	 * On sun4u, switch to the alternate/MMU global register
	 * set; on sun4v the patch below replaces these two
	 * instructions with SET_GL(1).
	 */

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	nop
	.previous

	/* For an instruction fault the faulting address is the
	 * trap PC itself; pass it in %g5 with FAULT_CODE_ITLB in
	 * %g4 (set in the delay slot).
	 */
	rdpr		%tpc, %g5
	ba,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_ITLB, %g4
kvmap_itlb_obp:
	/* Look up an OBP (firmware) text address in the OBP
	 * translations table; failure goes to the longpath.
	 */
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

	/* Insert the translation into the kernel TSB entry at %g1
	 * (lock, write tag %g6 + PTE %g5), then load the ITLB.
	 */
	KTSB_LOCK_TAG(%g1, %g2, %g7)

	KTSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_itlb_load
	 nop
kvmap_dtlb_obp:
	/* Look up an OBP (firmware) data address in the OBP
	 * translations table; failure goes to the longpath.
	 */
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	/* Insert the translation into the kernel TSB entry at %g1
	 * (lock, write tag %g6 + PTE %g5), then load the DTLB.
	 */
	KTSB_LOCK_TAG(%g1, %g2, %g7)

	KTSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_dtlb_load
	 nop
	.align		32
kvmap_dtlb_tsb4m_load:
	/* Linear-mapping DTLB miss resolved: store the computed
	 * PTE (%g5) with tag (%g6) into the TSB entry at %g1,
	 * then jump to the common DTLB load.
	 */
	KTSB_LOCK_TAG(%g1, %g2, %g7)
	KTSB_WRITE(%g1, %g5, %g6)
	ba,pt		%xcc, kvmap_dtlb_load
	 nop
kvmap_dtlb:
	/* Kernel DTLB miss, TL1.  %g6: TAG TARGET.
	 * Read the missing virtual address from the D-MMU
	 * tag access register into %g4.
	 */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g4

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
	/* Addresses with bit 63 clear are not in the linear
	 * (PAGE_OFFSET) mapping; handle them separately.
	 */
	brgez,pn	%g4, kvmap_dtlb_nonlinear
	 nop

#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* TSB entry address left in %g1, lookup linear PTE.
	 * Must preserve %g1 and %g6 (TAG).
	 */
kvmap_dtlb_tsb4m_miss:
	/* Clear the PAGE_OFFSET top virtual bits, shift
	 * down to get PFN, and make sure PFN is in range.
	 */
	sllx		%g4, 21, %g5

	/* Check to see if we know about valid memory at the 4MB
	 * chunk this physical address will reside within.
	 * Any physical-address bits above bit 41 mean the address
	 * is out of range.
	 */
	srlx		%g5, 21 + 41, %g2
	brnz,pn		%g2, kvmap_dtlb_longpath
	 nop

	/* This unconditional branch and delay-slot nop gets patched
	 * by the sethi sequence once the bitmap is properly setup.
	 */
	.globl		valid_addr_bitmap_insn
valid_addr_bitmap_insn:
	ba,pt		%xcc, 2f
	 nop
	.subsection	2
	.globl		valid_addr_bitmap_patch
valid_addr_bitmap_patch:
	sethi		%hi(sparc64_valid_addr_bitmap), %g7
	or		%g7, %lo(sparc64_valid_addr_bitmap), %g7
	.previous

	/* Test the per-4MB-chunk bit in sparc64_valid_addr_bitmap:
	 * %g2 = chunk index, %g5 = 64-bit word offset into the
	 * bitmap, %g7 = bit mask within that word.
	 */
	srlx		%g5, 21 + 22, %g2
	srlx		%g2, 6, %g5
	and		%g2, 63, %g2
	sllx		%g5, 3, %g5
	ldx		[%g7 + %g5], %g5
	mov		1, %g7
	sllx		%g7, %g2, %g7
	andcc		%g5, %g7, %g0
	be,pn		%xcc, kvmap_dtlb_longpath

	/* NOTE: the sethi at 2: sits in the delay slot of the
	 * branch above and executes on both paths (harmless).
	 */
2:	sethi		%hi(kpte_linear_bitmap), %g2
	or		%g2, %lo(kpte_linear_bitmap), %g2

	/* Get the 256MB physical address index. */
	sllx		%g4, 21, %g5
	mov		1, %g7
	srlx		%g5, 21 + 28, %g5

	/* Don't try this at home kids... this depends upon srlx
	 * only taking the low 6 bits of the shift count in %g5.
	 */
	sllx		%g7, %g5, %g7

	/* Divide by 64 to get the offset into the bitmask.  */
	srlx		%g5, 6, %g5
	sllx		%g5, 3, %g5

	/* kern_linear_pte_xor[((mask & bit) ? 1 : 0)] */
	ldx		[%g2 + %g5], %g2
	andcc		%g2, %g7, %g0
	sethi		%hi(kern_linear_pte_xor), %g5
	or		%g5, %lo(kern_linear_pte_xor), %g5
	bne,a,pt	%xcc, 1f
	 add		%g5, 8, %g5		! bit set: use kern_linear_pte_xor[1]

1:	ldx		[%g5], %g2

	/* Patched at boot (see kvmap_linear_patch users in mm init):
	 * form the linear-mapping PTE by XOR-ing the vaddr with the
	 * selected pte_xor value (delay slot) and insert it.
	 */
	.globl		kvmap_linear_patch
kvmap_linear_patch:
	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
	 xor		%g2, %g4, %g5
kvmap_dtlb_vmalloc_addr:
	/* Walk the kernel page tables for a vmalloc-range data
	 * address in %g4; on failure branch to the longpath.  On
	 * success %g5 holds the physical address of the PTE
	 * (it is loaded via ASI_PHYS_USE_EC below).
	 */
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	/* Lock the TSB entry at %g1 before modifying it
	 * (%g2, %g7 are scratch).
	 */
	KTSB_LOCK_TAG(%g1, %g2, %g7)

	/* Load and check PTE.  */
	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
	mov		1, %g7
	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
	/* A clear valid (sign) bit means the PTE is invalid; the
	 * annulled delay slot stores the invalid-tag pattern to
	 * unlock the TSB entry only when the branch is taken.
	 */
	brgez,a,pn	%g5, kvmap_dtlb_longpath
	 KTSB_STORE(%g1, %g7)

	/* Commit tag (%g6) and PTE (%g5) into the TSB entry,
	 * which also releases the lock.
	 */
	KTSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_dtlb_load:

661:	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	/* Record this instruction pair so boot-time patching can
	 * replace it on sun4v (see comment below).
	 */
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are setup
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_dtlb_load
	 mov		%g5, %g3
#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
	/* %g4 = faulting vmemmap address, %g5 = VMEMMAP_BASE
	 * (set up by the caller).  Compute the 4MB (1 << 22)
	 * chunk index into vmemmap_table and load the mapping
	 * PTE into %g5 in the branch delay slot on the way to
	 * the common DTLB load.
	 */
	sub		%g4, %g5, %g5
	srlx		%g5, 22, %g5
	sethi		%hi(vmemmap_table), %g1
	sllx		%g5, 3, %g5
	or		%g1, %lo(vmemmap_table), %g1
	ba,pt		%xcc, kvmap_dtlb_load
	 ldx		[%g1 + %g5], %g5
#endif
kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs: any address inside
	 * the first page is bogus.
	 */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_dtlb_longpath
	 nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap.  Addresses at or above
	 * VMEMMAP_BASE are handled by the vmemmap table lookup.
	 */
	mov		(VMEMMAP_BASE >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_vmemmap
	 nop
#endif

	/* Probe the kernel TSB; a hit branches to kvmap_dtlb_load,
	 * a miss falls through with the TSB entry address in %g1.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
	/* Only [MODULES_VADDR, VMALLOC_END) is a legal kernel
	 * nonlinear data range; everything else is a fault.
	 */
	sethi		%hi(MODULES_VADDR), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_longpath
	 mov		(VMALLOC_END >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_dtlb_longpath
	 nop

kvmap_check_obp:
	/* Below LOW_OBP_ADDRESS: vmalloc/module address.
	 * [LOW_OBP_ADDRESS, 4GB): OBP firmware translation.
	 * Otherwise: treat as vmalloc address.
	 */
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5			! %g5 = 4GB boundary
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_obp
	 nop
	ba,pt		%xcc, kvmap_dtlb_vmalloc_addr
	 nop
kvmap_dtlb_longpath:
	/* Unresolvable DTLB miss: hand off to the C fault path.
	 * On sun4u, switch to the alternate/MMU global register
	 * set; on sun4v the patch replaces these two instructions
	 * with SET_GL(1) plus a scratchpad load that puts the
	 * hypervisor fault-status block address in %g5.
	 */

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	ldxa		[%g0] ASI_SCRATCHPAD, %g5
	.previous

	/* Remember whether we trapped from TL1; the condition is
	 * consumed by the be below, after the fault address fetch.
	 */
	rdpr		%tl, %g3
	cmp		%g3, 1

	/* Fetch the faulting data address into %g5: sun4u reads
	 * the D-MMU tag access register, sun4v (patched) reads it
	 * from the hypervisor fault area pointed to by %g5.
	 */
661:	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g5
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	ldx		[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous

	/* TL == 1: normal fault path (FAULT_CODE_DTLB in delay
	 * slot); deeper trap levels go through the window-fixup
	 * trampoline instead.
	 */
	be,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_DTLB, %g4
	ba,pt		%xcc, winfix_trampoline
	 nop