/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

	.text
	.align		32

kvmap_itlb:
	/* g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_IMMU, %g4

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:

	/* Catch kernel NULL pointer calls.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_longpath
	 nop

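	/* Probe the kernel TSB for this vaddr.  On a hit the macro is
	 * expected to branch straight to kvmap_itlb_load with the TTE in
	 * %g5; on a miss it falls through, leaving the TSB entry pointer
	 * in %g1 for the slow paths below (see KERN_TSB_LOOKUP_TL1 in
	 * asm/tsb.h).
	 */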
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)

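	/* TSB miss: classify the vaddr.  Only the [LOW_OBP_ADDRESS, 4GB)
	 * window (the 4GB limit is built by the mov/sllx pair below) is
	 * handed to the OBP lookup; everything else ends up at
	 * kvmap_itlb_vmalloc_addr, either by branch or by fallthrough.
	 */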
kvmap_itlb_tsb_miss:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_obp
	 nop

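	/* Resolve the address through the kernel page tables, then
	 * install the resulting TTE in the kernel TSB so the next miss
	 * on this address hits the fast TSB path, and load it into the
	 * TLB.
	 */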
kvmap_itlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_itlb_load:

661:	stxa		%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are setup
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_itlb_load
	 mov		%g5, %g3

kvmap_itlb_longpath:

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	nop
	.previous

	rdpr		%tpc, %g5
	ba,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_ITLB, %g4

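	/* OBP window: OBP_TRANS_LOOKUP presumably searches the
	 * translations recorded from the OpenBoot PROM for this vaddr,
	 * leaving the TTE in %g5 on success and branching to the
	 * longpath on failure; the result is cached in the TSB and
	 * then loaded.
	 */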
kvmap_itlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_itlb_load
	 nop

kvmap_dtlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_dtlb_load
	 nop

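	/* Early-boot linear mapping: before the final linear-mapping
	 * setup is in place, a TTE is synthesized by xor'ing the vaddr
	 * with kern_linear_pte_xor (presumably initialized during early
	 * MM setup), then written into the TSB and loaded below.
	 */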
kvmap_linear_early:
	sethi		%hi(kern_linear_pte_xor), %g7
	ldx		[%g7 + %lo(kern_linear_pte_xor)], %g2
	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
	 xor		%g2, %g4, %g5

	.align		32
kvmap_dtlb_tsb4m_load:
	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)
	ba,pt		%xcc, kvmap_dtlb_load
	 nop

kvmap_dtlb:
	/* %g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g4

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
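	/* Kernel linear-mapping addresses have bit 63 set (they are
	 * "negative"), so a non-negative vaddr cannot be a linear mapping
	 * and is sent to the non-linear path.
	 */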
	brgez,pn	%g4, kvmap_dtlb_nonlinear
	 nop

#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* Linear mapping TSB lookup failed.  Fallthrough to kernel
	 * page table based lookup.
	 */
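	/* Patch site: the branch below initially goes to
	 * kvmap_linear_early, and the MM init code presumably rewrites it
	 * through the kvmap_linear_patch symbol once the final linear
	 * mapping / kernel TSB setup is complete.
	 */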
	.globl		kvmap_linear_patch
kvmap_linear_patch:
	ba,a,pt		%xcc, kvmap_linear_early

kvmap_dtlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_dtlb_load:

661:	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are setup
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_dtlb_load
	 mov		%g5, %g3

#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
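	/* On entry %g5 holds VMEMMAP_BASE (set up by the caller further
	 * down).  The offset from that base, shifted right by ILOG2_4MB,
	 * selects the 4MB chunk; shifting left by 3 turns that into an
	 * 8-byte slot index, and the TTE is loaded from vmemmap_table
	 * (presumably populated by the vmemmap setup code) in the branch
	 * delay slot.
	 */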
	sub		%g4, %g5, %g5
	srlx		%g5, ILOG2_4MB, %g5
	sethi		%hi(vmemmap_table), %g1
	sllx		%g5, 3, %g5
	or		%g1, %lo(vmemmap_table), %g1
	ba,pt		%xcc, kvmap_dtlb_load
	 ldx		[%g1 + %g5], %g5
#endif

kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_dtlb_longpath
	 nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap.  */
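	/* VMEMMAP_BASE does not fit a sethi immediate, so the constant is
	 * built by loading its top 24 bits and shifting left by 40 (this
	 * assumes the low 40 bits of VMEMMAP_BASE are zero); anything at
	 * or above it is handled via the vmemmap table.
	 */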
	mov		(VMEMMAP_BASE >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_vmemmap
	 nop
#endif

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
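	/* Bounds check for the non-linear region: anything below
	 * MODULES_VADDR or at/above VMALLOC_END is not a valid kernel
	 * mapping and takes the longpath fault.  VMALLOC_END is built
	 * with the same shift-by-40 trick used for VMEMMAP_BASE above.
	 */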
	sethi		%hi(MODULES_VADDR), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_longpath
	 mov		(VMALLOC_END >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_dtlb_longpath
	 nop

kvmap_check_obp:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_obp
	 nop
	ba,pt		%xcc, kvmap_dtlb_vmalloc_addr
	 nop

kvmap_dtlb_longpath:

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	ldxa		[%g0] ASI_SCRATCHPAD, %g5
	.previous

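	/* If this miss was taken at trap level 1, the normal fault path
	 * can handle it; at a deeper trap level the miss occurred inside
	 * another trap handler (e.g. a register window spill/fill), so it
	 * is presumably routed through winfix_trampoline instead.
	 */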
	rdpr		%tl, %g3
	cmp		%g3, 1

661:	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g5
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	ldx		[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous

	be,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_DTLB, %g4
	ba,pt		%xcc, winfix_trampoline
	 nop