| 1 | /* sun4v_tlb_miss.S: Sun4v TLB miss handlers. |
| 2 | * |
| 3 | * Copyright (C) 2006 <davem@davemloft.net> |
| 4 | */ |
| 5 | |
| 6 | .text |
| 7 | .align 32 |
| 8 | |
	/* Instruction TLB miss, TL=0.  Entered from the sun4v trap
	 * table with only the trap globals %g1-%g7 usable.  On a user
	 * TSB hit, falls through into sun4v_itlb_load below.
	 */
sun4v_itlb_miss:
	/* Load CPU ID into %g3.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3

	/* Load UTSB reg into %g1.
	 *
	 * NOTE(review): the "%g1 + %g1" address only works if the
	 * UTSB register's scratchpad offset is exactly twice
	 * SCRATCHPAD_CPUID -- confirm against the scratchpad layout
	 * definitions before touching this.
	 */
	ldxa	[%g1 + %g1] ASI_SCRATCHPAD, %g1

	/* Load &trap_block[smp_processor_id()] into %g2.  */
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3	! cpuid * sizeof(trap_block[0])
	add	%g2, %g3, %g2

	/* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6.
	 * Branch if kernel TLB miss.  The kernel TSB and user TSB miss
	 * code wants the missing virtual address in %g4, so that value
	 * cannot be modified through the entirety of this handler.
	 */
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
	srlx	%g4, 22, %g3
	sllx	%g5, 48, %g6
	or	%g6, %g3, %g6
	brz,pn	%g5, kvmap_itlb_4v		! context 0 == kernel miss
	 nop

	/* Create TSB pointer.  This is something like:
	 *
	 *	index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
	 *	tsb_base = tsb_reg & ~0x7UL;
	 */
	and	%g1, 0x7, %g3			! low 3 bits encode TSB size
	andn	%g1, 0x7, %g1
	mov	512, %g7
	sllx	%g7, %g3, %g7
	sub	%g7, 1, %g7

	/* TSB index mask is in %g7, tsb base is in %g1.  Compute
	 * the TSB entry pointer into %g1:
	 *
	 *	tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
	 *	tsb_ptr = tsb_base + (tsb_index * 16);
	 */
	srlx	%g4, PAGE_SHIFT, %g3
	and	%g3, %g7, %g3
	sllx	%g3, 4, %g3			! 16 bytes per TSB entry
	add	%g1, %g3, %g1

	/* Load TSB tag/pte into %g2/%g3 and compare the tag.  */
	ldda	[%g1] ASI_QUAD_LDD_PHYS, %g2
	cmp	%g2, %g6
	/* NOTE(review): using sethi alone assumes the low 10 bits of
	 * _PAGE_EXEC are zero -- confirm against the pgtable defines.
	 */
	sethi	%hi(_PAGE_EXEC), %g7
	bne,a,pn %xcc, tsb_miss_page_table_walk
	 mov	FAULT_CODE_ITLB, %g3		! delay slot, only on mismatch
	andcc	%g3, %g7, %g0			! PTE must permit execute
	be,a,pn	%xcc, tsb_do_fault
	 mov	FAULT_CODE_ITLB, %g3		! delay slot, only on no-exec
	/* We have a valid entry, make hypervisor call to load
	 * I-TLB and return from trap.
	 *
	 * %g3:	PTE
	 * %g4:	vaddr
	 * %g6:	TAG TARGET (only "CTX << 48" part matters)
	 */
sun4v_itlb_load:
	/* The hypervisor fast trap takes its arguments in %o0-%o3.
	 * We are running on trap globals, so the interrupted code's
	 * %o registers are live and must be preserved in free globals
	 * around the trap instruction.
	 */
	mov	%o0, %g1			! save %o0
	mov	%o1, %g2			! save %o1
	mov	%o2, %g5			! save %o2
	mov	%o3, %g7			! save %o3
	mov	%g4, %o0			! vaddr
	srlx	%g6, 48, %o1			! ctx
	mov	%g3, %o2			! PTE
	mov	HV_MMU_IMMU, %o3		! flags: I-MMU
	ta	HV_MMU_MAP_ADDR_TRAP		! hypervisor map-address fast trap
	mov	%g1, %o0			! restore %o0
	mov	%g2, %o1			! restore %o1
	mov	%g5, %o2			! restore %o2
	mov	%g7, %o3			! restore %o3

	retry					! re-execute the faulting fetch
| 91 | |
| 92 | sun4v_dtlb_miss: |
| 93 | /* Load CPU ID into %g3. */ |
| 94 | mov SCRATCHPAD_CPUID, %g1 |
| 95 | ldxa [%g1] ASI_SCRATCHPAD, %g3 |
| 96 | |
| 97 | /* Load UTSB reg into %g1. */ |
| 98 | ldxa [%g1 + %g1] ASI_SCRATCHPAD, %g1 |
| 99 | |
| 100 | /* Load &trap_block[smp_processor_id()] into %g2. */ |
| 101 | sethi %hi(trap_block), %g2 |
| 102 | or %g2, %lo(trap_block), %g2 |
| 103 | sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3 |
| 104 | add %g2, %g3, %g2 |
| 105 | |
| 106 | /* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6. |
| 107 | * Branch if kernel TLB miss. The kernel TSB and user TSB miss |
| 108 | * code wants the missing virtual address in %g4, so that value |
| 109 | * cannot be modified through the entirety of this handler. |
| 110 | */ |
| 111 | ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4 |
| 112 | ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5 |
| 113 | srlx %g4, 22, %g3 |
| 114 | sllx %g5, 48, %g6 |
| 115 | or %g6, %g3, %g6 |
| 116 | brz,pn %g5, kvmap_dtlb_4v |
| 117 | nop |
| 118 | |
| 119 | /* Create TSB pointer. This is something like: |
| 120 | * |
| 121 | * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL; |
| 122 | * tsb_base = tsb_reg & ~0x7UL; |
| 123 | */ |
| 124 | and %g1, 0x7, %g3 |
| 125 | andn %g1, 0x7, %g1 |
| 126 | mov 512, %g7 |
| 127 | sllx %g7, %g3, %g7 |
| 128 | sub %g7, 1, %g7 |
| 129 | |
| 130 | /* TSB index mask is in %g7, tsb base is in %g1. Compute |
| 131 | * the TSB entry pointer into %g1: |
| 132 | * |
| 133 | * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask); |
| 134 | * tsb_ptr = tsb_base + (tsb_index * 16); |
| 135 | */ |
| 136 | srlx %g4, PAGE_SHIFT, %g3 |
| 137 | and %g3, %g7, %g3 |
| 138 | sllx %g3, 4, %g3 |
| 139 | add %g1, %g3, %g1 |
| 140 | |
| 141 | /* Load TSB tag/pte into %g2/%g3 and compare the tag. */ |
| 142 | ldda [%g1] ASI_QUAD_LDD_PHYS, %g2 |
| 143 | cmp %g2, %g6 |
| 144 | bne,a,pn %xcc, tsb_miss_page_table_walk |
| 145 | mov FAULT_CODE_ITLB, %g3 |
| 146 | |
| 147 | /* We have a valid entry, make hypervisor call to load |
| 148 | * D-TLB and return from trap. |
| 149 | * |
| 150 | * %g3: PTE |
| 151 | * %g4: vaddr |
| 152 | * %g6: TAG TARGET (only "CTX << 48" part matters) |
| 153 | */ |
sun4v_dtlb_load:
	/* Hypervisor map-address fast trap for the D-MMU; arguments
	 * go in %o0-%o3, so the interrupted code's %o registers are
	 * stashed in free trap globals around the trap instruction.
	 */
	mov	%o0, %g1			! save %o0
	mov	%o1, %g2			! save %o1
	mov	%o2, %g5			! save %o2
	mov	%o3, %g7			! save %o3
	mov	%g4, %o0			! vaddr
	srlx	%g6, 48, %o1			! ctx
	mov	%g3, %o2			! PTE
	mov	HV_MMU_DMMU, %o3		! flags: D-MMU
	ta	HV_MMU_MAP_ADDR_TRAP		! hypervisor map-address fast trap
	mov	%g1, %o0			! restore %o0
	mov	%g2, %o1			! restore %o1
	mov	%g5, %o2			! restore %o2
	mov	%g7, %o3			! restore %o3

	retry					! re-execute the faulting access
| 170 | |
	/* D-TLB protection fault (write to a clean/read-only page).
	 * Hands off to the generic fault path with the fault address
	 * in %g5 and the fault code in %g4.
	 */
sun4v_dtlb_prot:
	/* Load CPU ID into %g3.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3

	/* Load &trap_block[smp_processor_id()] into %g2.  */
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2

	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g5
	/* If we trapped at TL > 1 we faulted inside a window
	 * spill/fill handler; fix the window up first.
	 */
	rdpr	%tl, %g1
	cmp	%g1, 1
	bgu,pn	%xcc, winfix_trampoline
	 nop
	ba,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4	! delay slot: fault code
| 189 | |
	/* Called from trap table with &trap_block[smp_processor_id()] in
	 * %g5 and SCRATCHPAD_UTSBREG1 contents in %g1.
	 *
	 * Builds the I-MMU TAG TARGET in %g6 and the missing vaddr in
	 * %g4, then joins the common TSB miss path (or the kernel
	 * path for context 0).
	 */
sun4v_itsb_miss:
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5

	/* TAG TARGET = (vaddr >> 22) | (ctx << 48) in %g6.  */
	srlx	%g4, 22, %g7
	sllx	%g5, 48, %g6
	or	%g6, %g7, %g6
	brz,pn	%g5, kvmap_itlb_4v		! context 0 == kernel miss
	 nop

	ba,pt	%xcc, sun4v_tsb_miss_common
	 mov	FAULT_CODE_ITLB, %g3		! delay slot: fault code
| 205 | |
	/* Called from trap table with &trap_block[smp_processor_id()] in
	 * %g5 and SCRATCHPAD_UTSBREG1 contents in %g1.
	 *
	 * D-MMU twin of sun4v_itsb_miss; falls through into
	 * sun4v_tsb_miss_common with FAULT_CODE_DTLB in %g3.
	 */
sun4v_dtsb_miss:
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5

	/* TAG TARGET = (vaddr >> 22) | (ctx << 48) in %g6.  */
	srlx	%g4, 22, %g7
	sllx	%g5, 48, %g6
	or	%g6, %g7, %g6
	brz,pn	%g5, kvmap_dtlb_4v		! context 0 == kernel miss
	 nop

	mov	FAULT_CODE_DTLB, %g3
| 220 | |
	/* Create TSB pointer into %g1.  This is something like:
	 *
	 *	index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
	 *	tsb_base = tsb_reg & ~0x7UL;
	 *	tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
	 *	tsb_ptr = tsb_base + (tsb_index * 16);
	 *
	 * On entry: %g1 = UTSB register value, %g3 = fault code,
	 * %g4 = missing vaddr, %g6 = TAG TARGET.
	 */
sun4v_tsb_miss_common:
	and	%g1, 0x7, %g2			! low 3 bits encode TSB size
	andn	%g1, 0x7, %g1			! strip size field -> tsb_base
	mov	512, %g7
	sllx	%g7, %g2, %g7
	sub	%g7, 1, %g7			! index mask
	srlx	%g4, PAGE_SHIFT, %g2
	and	%g2, %g7, %g2
	sllx	%g2, 4, %g2			! 16 bytes per TSB entry
	ba,pt	%xcc, tsb_miss_page_table_walk
	 add	%g1, %g2, %g1			! delay slot: %g1 = tsb_ptr
| 239 | |
	/* Instruction Access Exception, tl0.
	 * Packs (type << 16) | ctx into %g5, the fault address into
	 * %g4, then traps into C via etrap.
	 */
sun4v_iacc:
	/* Compute &trap_block[smp_processor_id()] into %g2.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2
	/* Fault type/addr/ctx as recorded by the hypervisor.  */
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5			! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap			! save state; resumes below
	 rd	%pc, %g7			! etrap return anchor
	/* NOTE(review): %l4/%l5 presumably carry the %g4/%g5 values
	 * preserved by etrap -- confirm against etrap.
	 */
	mov	%l4, %o1			! arg1: fault address
	mov	%l5, %o2			! arg2: (type << 16) | ctx
	call	sun4v_insn_access_exception
	 add	%sp, PTREGS_OFF, %o0		! arg0: pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
| 260 | |
	/* Instruction Access Exception, tl1.
	 * Same as the tl0 variant but enters via etraptl1 and calls
	 * the _tl1 C handler.
	 */
sun4v_iacc_tl1:
	/* Compute &trap_block[smp_processor_id()] into %g2.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2
	/* Fault type/addr/ctx as recorded by the hypervisor.  */
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5			! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etraptl1			! save state; resumes below
	 rd	%pc, %g7			! etrap return anchor
	mov	%l4, %o1			! arg1: fault address
	mov	%l5, %o2			! arg2: (type << 16) | ctx
	call	sun4v_insn_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0		! arg0: pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
| 281 | |
	/* Data Access Exception, tl0.
	 * Packs (type << 16) | ctx into %g5, fault address into %g4,
	 * then traps into C via etrap.
	 */
sun4v_dacc:
	/* Compute &trap_block[smp_processor_id()] into %g2.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2
	/* Fault type/addr/ctx as recorded by the hypervisor.  */
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5			! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap			! save state; resumes below
	 rd	%pc, %g7			! etrap return anchor
	mov	%l4, %o1			! arg1: fault address
	mov	%l5, %o2			! arg2: (type << 16) | ctx
	call	sun4v_data_access_exception
	 add	%sp, PTREGS_OFF, %o0		! arg0: pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
| 302 | |
	/* Data Access Exception, tl1.
	 * Same as the tl0 variant but enters via etraptl1 and calls
	 * the _tl1 C handler.
	 */
sun4v_dacc_tl1:
	/* Compute &trap_block[smp_processor_id()] into %g2.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2
	/* Fault type/addr/ctx as recorded by the hypervisor.  */
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5			! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etraptl1			! save state; resumes below
	 rd	%pc, %g7			! etrap return anchor
	mov	%l4, %o1			! arg1: fault address
	mov	%l5, %o2			! arg2: (type << 16) | ctx
	call	sun4v_data_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0		! arg0: pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
| 323 | |
| 324 | /* Memory Address Unaligned. */ |
| 325 | sun4v_mna: |
| 326 | mov SCRATCHPAD_CPUID, %g1 |
| 327 | ldxa [%g1] ASI_SCRATCHPAD, %g3 |
| 328 | sethi %hi(trap_block), %g2 |
| 329 | or %g2, %lo(trap_block), %g2 |
| 330 | sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3 |
| 331 | add %g2, %g3, %g2 |
| 332 | mov HV_FAULT_TYPE_UNALIGNED, %g3 |
| 333 | ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4 |
| 334 | ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5 |
| 335 | sllx %g3, 16, %g3 |
| 336 | or %g5, %g3, %g5 |
| 337 | |
| 338 | /* Window fixup? */ |
| 339 | rdpr %tl, %g2 |
| 340 | cmp %g2, 1 |
| 341 | bgu,pn %icc, winfix_mna |
| 342 | rdpr %tpc, %g3 |
| 343 | |
| 344 | ba,pt %xcc, etrap |
| 345 | rd %pc, %g7 |
| 346 | mov %l4, %o1 |
| 347 | mov %l5, %o2 |
| 348 | call sun4v_mna |
| 349 | add %sp, PTREGS_OFF, %o0 |
| 350 | ba,a,pt %xcc, rtrap_clr_l6 |
| 351 | |
	/* Privileged Action (user executed a privileged operation).
	 * No fault info needs to be collected; go straight to C.
	 */
sun4v_privact:
	ba,pt	%xcc, etrap			! save state; resumes below
	 rd	%pc, %g7			! etrap return anchor
	call	do_privact
	 add	%sp, PTREGS_OFF, %o0		! arg0: pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
| 359 | |
	/* Unaligned ldd float, tl0.
	 * Collects fault type/addr/ctx and hands off to the FP
	 * unaligned-load fixup in C.
	 */
sun4v_lddfmna:
	/* Compute &trap_block[smp_processor_id()] into %g2.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5			! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap			! save state; resumes below
	 rd	%pc, %g7			! etrap return anchor
	mov	%l4, %o1			! arg1: fault address
	mov	%l5, %o2			! arg2: (type << 16) | ctx
	call	handle_lddfmna
	 add	%sp, PTREGS_OFF, %o0		! arg0: pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
| 380 | |
	/* Unaligned std float, tl0.
	 * Collects fault type/addr/ctx and hands off to the FP
	 * unaligned-store fixup in C.
	 */
sun4v_stdfmna:
	/* Compute &trap_block[smp_processor_id()] into %g2.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5			! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap			! save state; resumes below
	 rd	%pc, %g7			! etrap return anchor
	mov	%l4, %o1			! arg1: fault address
	mov	%l5, %o2			! arg2: (type << 16) | ctx
	call	handle_stdfmna
	 add	%sp, PTREGS_OFF, %o0		! arg0: pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
| 401 | |
/* 0x10680000 decodes to "ba,pt %xcc, <disp19>" with a zero
 * displacement; SUN4V_DO_PATCH fills the displacement in.
 */
#define BRANCH_ALWAYS	0x10680000
#define NOP		0x01000000
/* Overwrite the two instructions at OLD with "ba,pt %xcc, NEW; nop",
 * redirecting a trap-table entry to one of the sun4v handlers above.
 * The word displacement ((NEW - OLD) >> 2) is OR'd into the branch
 * template, the patched words are stored, and "flush" keeps the
 * I-cache coherent with the stores.  Clobbers %g1-%g3.
 *
 * NOTE(review): assumes NEW is close enough to OLD for the
 * displacement to fit in the 19-bit branch field -- not checked.
 */
#define SUN4V_DO_PATCH(OLD, NEW)	\
	sethi	%hi(NEW), %g1; \
	or	%g1, %lo(NEW), %g1; \
	sethi	%hi(OLD), %g2; \
	or	%g2, %lo(OLD), %g2; \
	sub	%g1, %g2, %g1; \
	sethi	%hi(BRANCH_ALWAYS), %g3; \
	srl	%g1, 2, %g1; \
	or	%g3, %lo(BRANCH_ALWAYS), %g3; \
	or	%g3, %g1, %g3; \
	stw	%g3, [%g2]; \
	sethi	%hi(NOP), %g3; \
	or	%g3, %lo(NOP), %g3; \
	stw	%g3, [%g2 + 0x4]; \
	flush	%g2;
| 419 | |
	/* Patch each listed trap-table entry to branch to its sun4v
	 * handler defined above.  Called once during early boot on
	 * sun4v.  Clobbers %g1-%g3 (via SUN4V_DO_PATCH).
	 */
	.globl	sun4v_patch_tlb_handlers
	.type	sun4v_patch_tlb_handlers,#function
sun4v_patch_tlb_handlers:
	SUN4V_DO_PATCH(tl0_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl1_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl0_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl1_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl0_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl1_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl0_iax, sun4v_iacc)
	SUN4V_DO_PATCH(tl1_iax, sun4v_iacc_tl1)
	SUN4V_DO_PATCH(tl0_dax, sun4v_dacc)
	SUN4V_DO_PATCH(tl1_dax, sun4v_dacc_tl1)
	SUN4V_DO_PATCH(tl0_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl1_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl0_lddfmna, sun4v_lddfmna)
	SUN4V_DO_PATCH(tl0_stdfmna, sun4v_stdfmna)
	SUN4V_DO_PATCH(tl0_privact, sun4v_privact)
	retl
	 nop
	.size	sun4v_patch_tlb_handlers,.-sun4v_patch_tlb_handlers