Commit | Line | Data |
---|---|---|
2a7e2990 DM |
1 | /* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling. |
2 | * | |
bf4a7972 | 3 | * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net> |
2a7e2990 DM |
4 | * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de) |
5 | * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx) | |
6 | * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz) | |
74bf4312 | 7 | */ |
2a7e2990 | 8 | |
2a7e2990 DM |
9 | #include <asm/head.h> |
10 | #include <asm/asi.h> | |
11 | #include <asm/page.h> | |
12 | #include <asm/pgtable.h> | |
74bf4312 | 13 | #include <asm/tsb.h> |
2a7e2990 DM |
14 | |
15 | .text | |
16 | .align 32 | |
17 | ||
/* kvmap_itlb: sun4u kernel ITLB miss fast path.  Reads the missing
 * virtual address from the TLB_TAG_ACCESS MMU register (via ASI_IMMU)
 * into %g4, then falls through into the shared kvmap_itlb_4v path.
 */
74bf4312 DM |
18 | kvmap_itlb: |
19 | /* g6: TAG TARGET */ | |
20 | mov TLB_TAG_ACCESS, %g4 | |
21 | ldxa [%g4] ASI_IMMU, %g4 | |
22 | ||
d257d5da DM |
23 | /* sun4v_itlb_miss branches here with the missing virtual |
24 | * address already loaded into %g4 | |
25 | */ | |
26 | kvmap_itlb_4v: | |
27 | ||
74bf4312 DM |
28 | /* Catch kernel NULL pointer calls. */ |
/* Instruction fetches below PAGE_SIZE are NULL-pointer calls: take the
 * slow path, which turns this into a real fault.  (blu = branch if
 * unsigned-lower; the nop fills the branch delay slot.)
 */
29 | sethi %hi(PAGE_SIZE), %g5 | |
30 | cmp %g4, %g5 | |
1c2696cd | 31 | blu,pn %xcc, kvmap_itlb_longpath |
74bf4312 DM |
32 | nop |
33 | ||
/* Probe the kernel TSB; on a hit this macro branches straight to
 * kvmap_itlb_load with the PTE in %g5, otherwise falls through.
 */
34 | KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load) | |
35 | ||
36 | kvmap_itlb_tsb_miss: | |
/* TSB miss routing: below LOW_OBP_ADDRESS -> vmalloc/module page-table
 * walk; in [LOW_OBP_ADDRESS, 4GB) -> OpenBoot PROM translations; at or
 * above 4GB we fall through to kvmap_itlb_vmalloc_addr below.
 */
2a7e2990 DM |
37 | sethi %hi(LOW_OBP_ADDRESS), %g5 |
38 | cmp %g4, %g5 | |
74bf4312 | 39 | blu,pn %xcc, kvmap_itlb_vmalloc_addr |
2a7e2990 DM |
40 | mov 0x1, %g5 |
41 | sllx %g5, 32, %g5 | |
42 | cmp %g4, %g5 | |
74bf4312 | 43 | blu,pn %xcc, kvmap_itlb_obp |
2a7e2990 DM |
44 | nop |
45 | ||
/* kvmap_itlb_vmalloc_addr: resolve an ITLB miss on a vmalloc/module
 * address.  Walks the kernel page tables (bailing to the long path on
 * failure), inserts the PTE into the TSB entry left in %g1, then falls
 * through to the common ITLB load at kvmap_itlb_load.
 */
74bf4312 DM |
46 | kvmap_itlb_vmalloc_addr: |
47 | KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath) | |
48 | ||
9076d0e7 | 49 | TSB_LOCK_TAG(%g1, %g2, %g7) |
74bf4312 DM |
50 | |
51 | /* Load and check PTE. */ | |
52 | ldxa [%g5] ASI_PHYS_USE_EC, %g5 | |
8b234274 DM |
53 | mov 1, %g7 |
54 | sllx %g7, TSB_TAG_INVALID_BIT, %g7 | |
/* A non-negative PTE (sign bit clear) is treated as invalid: brgez
 * branches to the long path, and the annulled delay slot (",a") stores
 * the invalid tag so the locked TSB entry is released first.
 */
74bf4312 | 55 | brgez,a,pn %g5, kvmap_itlb_longpath |
9076d0e7 | 56 | TSB_STORE(%g1, %g7) |
2a7e2990 | 57 | |
9076d0e7 | 58 | TSB_WRITE(%g1, %g5, %g6) |
74bf4312 DM |
59 | |
60 | /* fallthrough to TLB load */ | |
61 | ||
62 | kvmap_itlb_load: | |
459b6e62 DM |
63 | |
/* sun4u: direct ITLB data-in store + retry.  On sun4v the
 * .sun4v_2insn_patch machinery nops these two instructions out and
 * execution continues into the branch to sun4v_itlb_load below.
 */
64 | 661: stxa %g5, [%g0] ASI_ITLB_DATA_IN | |
2a7e2990 | 65 | retry |
459b6e62 DM |
66 | .section .sun4v_2insn_patch, "ax" |
67 | .word 661b | |
68 | nop | |
69 | nop | |
70 | .previous | |
71 | ||
72 | /* For sun4v the ASI_ITLB_DATA_IN store and the retry | |
73 | * instruction get nop'd out and we get here to branch | |
74 | * to the sun4v tlb load code. The registers are setup | |
75 | * as follows: | |
76 | * | |
77 | * %g4: vaddr | |
78 | * %g5: PTE | |
79 | * %g6: TAG | |
80 | * | |
81 | * The sun4v TLB load wants the PTE in %g3 so we fix that | |
82 | * up here. | |
83 | */ | |
84 | ba,pt %xcc, sun4v_itlb_load | |
85 | mov %g5, %g3 | |
2a7e2990 | 86 | |
/* kvmap_itlb_longpath: ITLB miss slow path.  Switch to the trap-handler
 * global register set (sun4u: toggle PSTATE_AG|PSTATE_MG; sun4v: patched
 * to SET_GL(1)), then hand off to sparc64_realfault_common with the
 * faulting PC in %g5 and FAULT_CODE_ITLB in %g4 (set in the delay slot).
 */
74bf4312 | 87 | kvmap_itlb_longpath: |
45fec05f DM |
88 | |
89 | 661: rdpr %pstate, %g5 | |
74bf4312 | 90 | wrpr %g5, PSTATE_AG | PSTATE_MG, %pstate |
df7d6aec | 91 | .section .sun4v_2insn_patch, "ax" |
45fec05f | 92 | .word 661b |
6cc200db | 93 | SET_GL(1) |
45fec05f DM |
94 | nop |
95 | .previous | |
96 | ||
74bf4312 DM |
97 | rdpr %tpc, %g5 |
98 | ba,pt %xcc, sparc64_realfault_common | |
99 | mov FAULT_CODE_ITLB, %g4 | |
100 | ||
/* kvmap_itlb_obp: ITLB miss in the OpenBoot PROM address range.  Look
 * the address up in the OBP translations table (long path on failure),
 * insert the result into the TSB, then branch to the common ITLB load.
 */
101 | kvmap_itlb_obp: | |
102 | OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath) | |
103 | ||
9076d0e7 | 104 | TSB_LOCK_TAG(%g1, %g2, %g7) |
74bf4312 | 105 | |
9076d0e7 | 106 | TSB_WRITE(%g1, %g5, %g6) |
74bf4312 DM |
107 | |
108 | ba,pt %xcc, kvmap_itlb_load | |
109 | nop | |
110 | ||
/* kvmap_dtlb_obp: DTLB mirror of kvmap_itlb_obp — translate an OBP-range
 * address via the OBP translations table, fill the TSB entry, and branch
 * to the common DTLB load.
 */
111 | kvmap_dtlb_obp: | |
112 | OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath) | |
113 | ||
9076d0e7 | 114 | TSB_LOCK_TAG(%g1, %g2, %g7) |
74bf4312 | 115 | |
9076d0e7 | 116 | TSB_WRITE(%g1, %g5, %g6) |
74bf4312 DM |
117 | |
118 | ba,pt %xcc, kvmap_dtlb_load | |
119 | nop | |
c9c10830 | 120 | |
/* kvmap_dtlb_tsb4m_load: store the computed linear-mapping PTE (%g5)
 * with its tag (%g6) into the TSB entry whose address is in %g1, then
 * branch to the common DTLB load.  Aligned to 32 bytes since this is a
 * hot branch target.
 */
2a7e2990 | 121 | .align 32 |
d7744a09 | 122 | kvmap_dtlb_tsb4m_load: |
9076d0e7 DM |
123 | TSB_LOCK_TAG(%g1, %g2, %g7) |
124 | TSB_WRITE(%g1, %g5, %g6) | |
d7744a09 DM |
125 | ba,pt %xcc, kvmap_dtlb_load |
126 | nop | |
127 | ||
/* kvmap_dtlb: sun4u kernel DTLB miss entry.  Reads the missing virtual
 * address from TLB_TAG_ACCESS (via ASI_DMMU) into %g4 and falls into the
 * shared kvmap_dtlb_4v path.
 */
74bf4312 DM |
128 | kvmap_dtlb: |
129 | /* %g6: TAG TARGET */ | |
130 | mov TLB_TAG_ACCESS, %g4 | |
131 | ldxa [%g4] ASI_DMMU, %g4 | |
d257d5da DM |
132 | |
133 | /* sun4v_dtlb_miss branches here with the missing virtual | |
134 | * address already loaded into %g4 | |
135 | */ | |
136 | kvmap_dtlb_4v: | |
/* brgez tests the sign bit of the vaddr: addresses with bit 63 clear
 * are not in the kernel linear mapping and go to kvmap_dtlb_nonlinear.
 */
74bf4312 | 137 | brgez,pn %g4, kvmap_dtlb_nonlinear |
56425306 DM |
138 | nop |
139 | ||
d1acb421 DM |
140 | #ifdef CONFIG_DEBUG_PAGEALLOC |
141 | /* Index through the base page size TSB even for linear | |
142 | * mappings when using page allocation debugging. | |
143 | */ | |
144 | KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load) | |
145 | #else | |
d7744a09 DM |
146 | /* Correct TAG_TARGET is already in %g6, check 4mb TSB. */ |
147 | KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load) | |
d1acb421 | 148 | #endif |
/* kvmap_dtlb_tsb4m_miss: compute a linear-mapping PTE from the vaddr.
 * The paired sllx/srlx instructions tagged with .page_offset_shift_patch
 * are rewritten at boot once the final PAGE_OFFSET shift is known.
 * Steps: strip the PAGE_OFFSET top bits to get a physical address,
 * range-check it against MAX_PHYS_ADDRESS_BITS, optionally test the
 * sparc64_valid_addr_bitmap (the ba/nop at valid_addr_bitmap_insn is
 * patched to the sethi sequence once the bitmap exists), then index
 * kpte_linear_bitmap — 2 bits per 256MB chunk — to pick one of four
 * kern_linear_pte_xor[] values.  Must preserve %g1 (TSB entry address)
 * and %g6 (TAG) for kvmap_dtlb_tsb4m_load.
 */
d7744a09 DM |
149 | /* TSB entry address left in %g1, lookup linear PTE. |
150 | * Must preserve %g1 and %g6 (TAG). | |
151 | */ | |
152 | kvmap_dtlb_tsb4m_miss: | |
d8ed1d43 DM |
153 | /* Clear the PAGE_OFFSET top virtual bits, shift |
154 | * down to get PFN, and make sure PFN is in range. | |
155 | */ | |
b2d43834 DM |
156 | 661: sllx %g4, 0, %g5 |
157 | .section .page_offset_shift_patch, "ax" | |
158 | .word 661b | |
159 | .previous | |
9cc3a1ac | 160 | |
d8ed1d43 DM |
161 | /* Check to see if we know about valid memory at the 4MB |
162 | * chunk this physical address will reside within. | |
9cc3a1ac | 163 | */ |
b2d43834 DM |
164 | 661: srlx %g5, MAX_PHYS_ADDRESS_BITS, %g2 |
165 | .section .page_offset_shift_patch, "ax" | |
166 | .word 661b | |
167 | .previous | |
168 | ||
/* Any bits left above MAX_PHYS_ADDRESS_BITS mean the address is out of
 * range -> long path.
 */
d8ed1d43 DM |
169 | brnz,pn %g2, kvmap_dtlb_longpath |
170 | nop | |
171 | ||
172 | /* This unconditional branch and delay-slot nop gets patched | |
173 | * by the sethi sequence once the bitmap is properly setup. | |
174 | */ | |
175 | .globl valid_addr_bitmap_insn | |
176 | valid_addr_bitmap_insn: | |
177 | ba,pt %xcc, 2f | |
178 | nop | |
179 | .subsection 2 | |
180 | .globl valid_addr_bitmap_patch | |
181 | valid_addr_bitmap_patch: | |
182 | sethi %hi(sparc64_valid_addr_bitmap), %g7 | |
183 | or %g7, %lo(sparc64_valid_addr_bitmap), %g7 | |
184 | .previous | |
185 | ||
/* Bitmap test: %g2 = 4MB chunk index; split into 64-bit-word offset
 * (index/64, scaled by 8) and bit number (index%64).  A clear bit means
 * no valid memory there -> long path.
 */
b2d43834 DM |
186 | 661: srlx %g5, ILOG2_4MB, %g2 |
187 | .section .page_offset_shift_patch, "ax" | |
188 | .word 661b | |
189 | .previous | |
190 | ||
d8ed1d43 DM |
191 | srlx %g2, 6, %g5 |
192 | and %g2, 63, %g2 | |
193 | sllx %g5, 3, %g5 | |
194 | ldx [%g7 + %g5], %g5 | |
195 | mov 1, %g7 | |
196 | sllx %g7, %g2, %g7 | |
197 | andcc %g5, %g7, %g0 | |
198 | be,pn %xcc, kvmap_dtlb_longpath | |
199 | ||
200 | 2: sethi %hi(kpte_linear_bitmap), %g2 | |
d8ed1d43 DM |
201 | |
202 | /* Get the 256MB physical address index. */ | |
b2d43834 DM |
203 | 661: sllx %g4, 0, %g5 |
204 | .section .page_offset_shift_patch, "ax" | |
205 | .word 661b | |
206 | .previous | |
207 | ||
4f93d21d | 208 | or %g2, %lo(kpte_linear_bitmap), %g2 |
b2d43834 DM |
209 | |
210 | 661: srlx %g5, ILOG2_256MB, %g5 | |
211 | .section .page_offset_shift_patch, "ax" | |
212 | .word 661b | |
213 | .previous | |
214 | ||
/* 32 two-bit entries per 64-bit bitmap word: %g7 = (index % 32) * 2 is
 * the bit shift, %g5 = (index / 32) * 8 is the word offset.
 */
4f93d21d | 215 | and %g5, (32 - 1), %g7 |
9cc3a1ac | 216 | |
4f93d21d DM |
217 | /* Divide by 32 to get the offset into the bitmask. */ |
218 | srlx %g5, 5, %g5 | |
219 | add %g7, %g7, %g7 | |
6889331a | 220 | sllx %g5, 3, %g5 |
9cc3a1ac | 221 | |
4f93d21d | 222 | /* kern_linear_pte_xor[(mask >> shift) & 3] */ |
9cc3a1ac | 223 | ldx [%g2 + %g5], %g2 |
4f93d21d | 224 | srlx %g2, %g7, %g7 |
9cc3a1ac | 225 | sethi %hi(kern_linear_pte_xor), %g5 |
4f93d21d | 226 | and %g7, 3, %g7 |
9cc3a1ac | 227 | or %g5, %lo(kern_linear_pte_xor), %g5 |
4f93d21d DM |
228 | sllx %g7, 3, %g7 |
229 | ldx [%g5 + %g7], %g2 | |
74bf4312 | 230 | |
/* kvmap_linear_patch: patchable branch; the delay slot forms the PTE as
 * vaddr XOR the selected kern_linear_pte_xor value (%g5 = %g2 ^ %g4).
 */
56425306 DM |
231 | .globl kvmap_linear_patch |
232 | kvmap_linear_patch: | |
d7744a09 | 233 | ba,pt %xcc, kvmap_dtlb_tsb4m_load |
2a7e2990 DM |
234 | xor %g2, %g4, %g5 |
235 | ||
/* kvmap_dtlb_vmalloc_addr: DTLB mirror of kvmap_itlb_vmalloc_addr.
 * Walk the kernel page tables for a vmalloc-range address, insert the
 * PTE into the TSB entry in %g1, and fall through to the common DTLB
 * load at kvmap_dtlb_load.
 */
74bf4312 DM |
236 | kvmap_dtlb_vmalloc_addr: |
237 | KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath) | |
238 | ||
9076d0e7 | 239 | TSB_LOCK_TAG(%g1, %g2, %g7) |
74bf4312 DM |
240 | |
241 | /* Load and check PTE. */ | |
242 | ldxa [%g5] ASI_PHYS_USE_EC, %g5 | |
8b234274 DM |
243 | mov 1, %g7 |
244 | sllx %g7, TSB_TAG_INVALID_BIT, %g7 | |
/* Non-negative PTE = invalid: branch to the long path, with the
 * annulled delay slot storing the invalid tag to unlock the TSB entry.
 */
74bf4312 | 245 | brgez,a,pn %g5, kvmap_dtlb_longpath |
9076d0e7 | 246 | TSB_STORE(%g1, %g7) |
74bf4312 | 247 | |
9076d0e7 | 248 | TSB_WRITE(%g1, %g5, %g6) |
74bf4312 DM |
249 | |
250 | /* fallthrough to TLB load */ | |
251 | ||
252 | kvmap_dtlb_load: | |
459b6e62 DM |
253 | |
/* sun4u: direct DTLB data-in store + retry; nop'd out on sun4v by the
 * .sun4v_2insn_patch machinery, which makes execution fall into the
 * branch to sun4v_dtlb_load below.
 */
254 | 661: stxa %g5, [%g0] ASI_DTLB_DATA_IN ! Reload TLB | |
74bf4312 | 255 | retry |
459b6e62 DM |
256 | .section .sun4v_2insn_patch, "ax" |
257 | .word 661b | |
258 | nop | |
259 | nop | |
260 | .previous | |
261 | ||
262 | /* For sun4v the ASI_DTLB_DATA_IN store and the retry | |
263 | * instruction get nop'd out and we get here to branch | |
264 | * to the sun4v tlb load code. The registers are setup | |
265 | * as follows: | |
266 | * | |
267 | * %g4: vaddr | |
268 | * %g5: PTE | |
269 | * %g6: TAG | |
270 | * | |
271 | * The sun4v TLB load wants the PTE in %g3 so we fix that | |
272 | * up here. | |
273 | */ | |
274 | ba,pt %xcc, sun4v_dtlb_load | |
275 | mov %g5, %g3 | |
74bf4312 | 276 | |
/* kvmap_vmemmap: DTLB miss inside the vmemmap region.  Entered from
 * kvmap_dtlb_nonlinear with %g5 = VMEMMAP_BASE; %g4 - %g5 gives the
 * offset, shifted by ILOG2_4MB to index the 8-byte entries of
 * vmemmap_table, whose entry is loaded as the TLB data (delay slot)
 * before branching to the common DTLB load.
 */
bf4a7972 | 277 | #ifdef CONFIG_SPARSEMEM_VMEMMAP |
46644c24 DM |
278 | kvmap_vmemmap: |
279 | sub %g4, %g5, %g5 | |
0eef331a | 280 | srlx %g5, ILOG2_4MB, %g5 |
46644c24 DM |
281 | sethi %hi(vmemmap_table), %g1 |
282 | sllx %g5, 3, %g5 | |
283 | or %g1, %lo(vmemmap_table), %g1 | |
284 | ba,pt %xcc, kvmap_dtlb_load | |
285 | ldx [%g1 + %g5], %g5 | |
bf4a7972 | 286 | #endif |
46644c24 | 287 | |
/* kvmap_dtlb_nonlinear: DTLB miss on an address outside the kernel
 * linear mapping.  Rejects NULL-page dereferences (addresses <=
 * PAGE_SIZE, note bleu), optionally diverts vmemmap-range addresses to
 * kvmap_vmemmap (with VMEMMAP_BASE left in %g5 for it), otherwise
 * probes the kernel TSB and falls into kvmap_dtlb_tsbmiss on a miss.
 */
74bf4312 DM |
288 | kvmap_dtlb_nonlinear: |
289 | /* Catch kernel NULL pointer derefs. */ | |
290 | sethi %hi(PAGE_SIZE), %g5 | |
291 | cmp %g4, %g5 | |
292 | bleu,pn %xcc, kvmap_dtlb_longpath | |
56425306 | 293 | nop |
56425306 | 294 | |
bf4a7972 | 295 | #ifdef CONFIG_SPARSEMEM_VMEMMAP |
46644c24 | 296 | /* Do not use the TSB for vmemmap. */ |
1b6b9d62 DM |
297 | mov (VMEMMAP_BASE >> 40), %g5 |
298 | sllx %g5, 40, %g5 | |
46644c24 DM |
299 | cmp %g4,%g5 |
300 | bgeu,pn %xcc, kvmap_vmemmap | |
301 | nop | |
bf4a7972 | 302 | #endif |
46644c24 | 303 | |
74bf4312 DM |
304 | KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load) |
305 | ||
/* kvmap_dtlb_tsbmiss: TSB missed on a non-linear address.  Only the
 * [MODULES_VADDR, VMALLOC_END) window is mappable here — anything
 * outside it is a real fault (long path).  kvmap_check_obp then routes:
 * below LOW_OBP_ADDRESS -> page-table walk; [LOW_OBP_ADDRESS, 4GB) ->
 * OBP translations; otherwise -> page-table walk.
 */
306 | kvmap_dtlb_tsbmiss: | |
2a7e2990 DM |
307 | sethi %hi(MODULES_VADDR), %g5 |
308 | cmp %g4, %g5 | |
74bf4312 | 309 | blu,pn %xcc, kvmap_dtlb_longpath |
1b6b9d62 DM |
310 | mov (VMALLOC_END >> 40), %g5 |
311 | sllx %g5, 40, %g5 | |
2a7e2990 | 312 | cmp %g4, %g5 |
74bf4312 | 313 | bgeu,pn %xcc, kvmap_dtlb_longpath |
2a7e2990 DM |
314 | nop |
315 | ||
316 | kvmap_check_obp: | |
317 | sethi %hi(LOW_OBP_ADDRESS), %g5 | |
318 | cmp %g4, %g5 | |
74bf4312 | 319 | blu,pn %xcc, kvmap_dtlb_vmalloc_addr |
2a7e2990 DM |
320 | mov 0x1, %g5 |
321 | sllx %g5, 32, %g5 | |
322 | cmp %g4, %g5 | |
74bf4312 | 323 | blu,pn %xcc, kvmap_dtlb_obp |
2a7e2990 | 324 | nop |
74bf4312 | 325 | ba,pt %xcc, kvmap_dtlb_vmalloc_addr |
2a7e2990 DM |
326 | nop |
327 | ||
/* kvmap_dtlb_longpath: DTLB miss slow path.  Switch to the trap-handler
 * globals (sun4u: PSTATE_AG|PSTATE_MG toggle; sun4v: patched to
 * SET_GL(1) plus loading the per-cpu fault-info pointer from the
 * scratchpad into %g5), fetch the faulting data address (sun4u:
 * TLB_TAG_ACCESS via ASI_DMMU; sun4v: HV_FAULT_D_ADDR_OFFSET from the
 * hypervisor fault area), then dispatch on trap level: TL==1 goes to
 * sparc64_realfault_common with FAULT_CODE_DTLB, otherwise to
 * winfix_trampoline (nested window-fill case).
 */
74bf4312 | 328 | kvmap_dtlb_longpath: |
45fec05f DM |
329 | |
330 | 661: rdpr %pstate, %g5 | |
74bf4312 | 331 | wrpr %g5, PSTATE_AG | PSTATE_MG, %pstate |
df7d6aec | 332 | .section .sun4v_2insn_patch, "ax" |
45fec05f | 333 | .word 661b |
8b234274 DM |
334 | SET_GL(1) |
335 | ldxa [%g0] ASI_SCRATCHPAD, %g5 | |
45fec05f DM |
336 | .previous |
337 | ||
459b6e62 DM |
338 | rdpr %tl, %g3 |
339 | cmp %g3, 1 | |
340 | ||
341 | 661: mov TLB_TAG_ACCESS, %g4 | |
74bf4312 | 342 | ldxa [%g4] ASI_DMMU, %g5 |
459b6e62 DM |
343 | .section .sun4v_2insn_patch, "ax" |
344 | .word 661b | |
8b234274 | 345 | ldx [%g5 + HV_FAULT_D_ADDR_OFFSET], %g5 |
459b6e62 DM |
346 | nop |
347 | .previous | |
348 | ||
/* Condition codes from the "cmp %g3, 1" above survive the patched
 * address-fetch sequence (those instructions do not write the CCs).
 */
74bf4312 DM |
349 | be,pt %xcc, sparc64_realfault_common |
350 | mov FAULT_CODE_DTLB, %g4 | |
351 | ba,pt %xcc, winfix_trampoline | |
352 | nop |