Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * This file is subject to the terms and conditions of the GNU General Public | |
3 | * License. See the file "COPYING" in the main directory of this archive | |
4 | * for more details. | |
5 | * | |
98de920a | 6 | * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org> |
a3c4946d RB |
7 | * Copyright (C) MIPS Technologies, Inc. |
8 | * written by Ralf Baechle <ralf@linux-mips.org> | |
1da177e4 LT |
9 | */ |
10 | #ifndef _ASM_HAZARDS_H | |
11 | #define _ASM_HAZARDS_H | |
12 | ||
02b849f7 | 13 | #include <linux/stringify.h> |
f52fca97 | 14 | #include <asm/compiler.h> |
1da177e4 | 15 | |
02b849f7 RB |
16 | #define ___ssnop \ |
17 | sll $0, $0, 1 | |
d7d86aa8 | 18 | |
02b849f7 RB |
19 | #define ___ehb \ |
20 | sll $0, $0, 3 | |
d7d86aa8 | 21 | |
1da177e4 | 22 | /* |
d7d86aa8 | 23 | * TLB hazards |
1da177e4 | 24 | */ |
f52fca97 | 25 | #if (defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)) && !defined(CONFIG_CPU_CAVIUM_OCTEON)
1da177e4 | 26 | |
1da177e4 | 27 | /* |
d7d86aa8 | 28 | * MIPSR2 defines ehb for hazard avoidance |
1da177e4 LT |
29 | */ |
30 | ||
02b849f7 RB |
31 | #define __mtc0_tlbw_hazard \ |
32 | ___ehb | |
33 | ||
34 | #define __tlbw_use_hazard \ | |
35 | ___ehb | |
36 | ||
37 | #define __tlb_probe_hazard \ | |
38 | ___ehb | |
39 | ||
40 | #define __irq_enable_hazard \ | |
41 | ___ehb | |
42 | ||
43 | #define __irq_disable_hazard \ | |
44 | ___ehb | |
45 | ||
46 | #define __back_to_back_c0_hazard \ | |
47 | ___ehb | |
48 | ||
1da177e4 | 49 | /* |
d7d86aa8 | 50 | * gcc has a tradition of miscompiling the previous construct using the |
70342287 | 51 | * address of a label as argument to inline assembler. Gas otoh has the |
d7d86aa8 RB |
52 | * annoying difference between la and dla which are only usable for 32-bit |
53 | * resp. 64-bit code, so can't be used without conditional compilation. | |
54 | * The alternative is switching the assembler to 64-bit code which happens | |
55 | * to work right even for 32-bit code ... | |
1da177e4 | 56 | */ |
d7d86aa8 RB |
57 | #define instruction_hazard() \ |
58 | do { \ | |
59 | unsigned long tmp; \ | |
60 | \ | |
61 | __asm__ __volatile__( \ | |
f52fca97 | 62 | " .set "MIPS_ISA_LEVEL" \n" \ |
d7d86aa8 RB |
63 | " dla %0, 1f \n" \ |
64 | " jr.hb %0 \n" \ | |
65 | " .set mips0 \n" \ | |
66 | "1: \n" \ | |
67 | : "=r" (tmp)); \ | |
68 | } while (0) | |
1da177e4 | 69 | |
1c7c4451 KC |
70 | #elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \ |
71 | defined(CONFIG_CPU_BMIPS) | |
572afc24 RB |
72 | |
73 | /* | |
74 | * These are slightly complicated by the fact that we guarantee R1 kernels to | |
75 | * run fine on R2 processors. | |
76 | */ | |
02b849f7 RB |
77 | |
78 | #define __mtc0_tlbw_hazard \ | |
79 | ___ssnop; \ | |
80 | ___ssnop; \ | |
81 | ___ehb | |
82 | ||
83 | #define __tlbw_use_hazard \ | |
84 | ___ssnop; \ | |
85 | ___ssnop; \ | |
86 | ___ssnop; \ | |
87 | ___ehb | |
88 | ||
89 | #define __tlb_probe_hazard \ | |
90 | ___ssnop; \ | |
91 | ___ssnop; \ | |
92 | ___ssnop; \ | |
93 | ___ehb | |
94 | ||
95 | #define __irq_enable_hazard \ | |
96 | ___ssnop; \ | |
97 | ___ssnop; \ | |
98 | ___ssnop; \ | |
99 | ___ehb | |
100 | ||
101 | #define __irq_disable_hazard \ | |
102 | ___ssnop; \ | |
103 | ___ssnop; \ | |
104 | ___ssnop; \ | |
105 | ___ehb | |
106 | ||
107 | #define __back_to_back_c0_hazard \ | |
108 | ___ssnop; \ | |
109 | ___ssnop; \ | |
110 | ___ssnop; \ | |
111 | ___ehb | |
112 | ||
572afc24 RB |
113 | /* |
114 | * gcc has a tradition of miscompiling the previous construct using the | |
70342287 | 115 | * address of a label as argument to inline assembler. Gas otoh has the |
572afc24 RB |
116 | * annoying difference between la and dla which are only usable for 32-bit |
117 | * resp. 64-bit code, so can't be used without conditional compilation. | |
118 | * The alternative is switching the assembler to 64-bit code which happens | |
119 | * to work right even for 32-bit code ... | |
120 | */ | |
121 | #define __instruction_hazard() \ | |
122 | do { \ | |
123 | unsigned long tmp; \ | |
124 | \ | |
125 | __asm__ __volatile__( \ | |
126 | " .set mips64r2 \n" \ | |
127 | " dla %0, 1f \n" \ | |
128 | " jr.hb %0 \n" \ | |
129 | " .set mips0 \n" \ | |
130 | "1: \n" \ | |
131 | : "=r" (tmp)); \ | |
132 | } while (0) | |
133 | ||
134 | #define instruction_hazard() \ | |
135 | do { \ | |
f52fca97 | 136 | if (cpu_has_mips_r2_r6) \ |
572afc24 RB |
137 | __instruction_hazard(); \ |
138 | } while (0) | |
139 | ||
42a4f17d | 140 | #elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \ |
15fb0a15 | 141 | defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \ |
5a4cbe38 | 142 | defined(CONFIG_CPU_R5500) || defined(CONFIG_CPU_XLR) |
1da177e4 LT |
143 | |
144 | /* | |
d7d86aa8 | 145 | * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer. |
1da177e4 | 146 | */ |
1da177e4 | 147 | |
02b849f7 RB |
148 | #define __mtc0_tlbw_hazard |
149 | ||
150 | #define __tlbw_use_hazard | |
151 | ||
152 | #define __tlb_probe_hazard | |
153 | ||
154 | #define __irq_enable_hazard | |
155 | ||
156 | #define __irq_disable_hazard | |
157 | ||
158 | #define __back_to_back_c0_hazard | |
159 | ||
d7d86aa8 | 160 | #define instruction_hazard() do { } while (0) |
1da177e4 | 161 | |
d7d86aa8 | 162 | #elif defined(CONFIG_CPU_SB1) |
1da177e4 LT |
163 | |
164 | /* | |
d7d86aa8 | 165 | * Mostly like R4000 for historic reasons |
1da177e4 | 166 | */ |
02b849f7 RB |
167 | #define __mtc0_tlbw_hazard |
168 | ||
169 | #define __tlbw_use_hazard | |
170 | ||
171 | #define __tlb_probe_hazard | |
172 | ||
173 | #define __irq_enable_hazard | |
174 | ||
175 | #define __irq_disable_hazard \ | |
176 | ___ssnop; \ | |
177 | ___ssnop; \ | |
178 | ___ssnop | |
179 | ||
180 | #define __back_to_back_c0_hazard | |
181 | ||
d7d86aa8 | 182 | #define instruction_hazard() do { } while (0) |
5068debf | 183 | |
1da177e4 LT |
184 | #else |
185 | ||
186 | /* | |
d7d86aa8 RB |
187 | * Finally the catchall case for all other processors including R4000, R4400, |
188 | * R4600, R4700, R5000, RM7000, NEC VR41xx etc. | |
a3c4946d | 189 | * |
d7d86aa8 RB |
190 | * The taken branch will result in a two cycle penalty for the two killed |
191 | * instructions on R4000 / R4400. Other processors only have a single cycle | |
192 | * hazard so this is nice trick to have an optimal code for a range of | |
193 | * processors. | |
7043ad4f | 194 | */ |
02b849f7 RB |
195 | #define __mtc0_tlbw_hazard \ |
196 | nop; \ | |
197 | nop | |
198 | ||
199 | #define __tlbw_use_hazard \ | |
200 | nop; \ | |
201 | nop; \ | |
202 | nop | |
203 | ||
204 | #define __tlb_probe_hazard \ | |
205 | nop; \ | |
206 | nop; \ | |
207 | nop | |
208 | ||
209 | #define __irq_enable_hazard \ | |
210 | ___ssnop; \ | |
211 | ___ssnop; \ | |
212 | ___ssnop | |
213 | ||
214 | #define __irq_disable_hazard \ | |
215 | nop; \ | |
216 | nop; \ | |
217 | nop | |
218 | ||
219 | #define __back_to_back_c0_hazard \ | |
220 | ___ssnop; \ | |
221 | ___ssnop; \ | |
222 | ___ssnop | |
223 | ||
cc61c1fe | 224 | #define instruction_hazard() do { } while (0) |
41c594ab | 225 | |
d7d86aa8 | 226 | #endif |
1da177e4 | 227 | |
0b624956 CD |
228 | |
229 | /* FPU hazards */ | |
230 | ||
231 | #if defined(CONFIG_CPU_SB1) | |
02b849f7 RB |
232 | |
233 | #define __enable_fpu_hazard \ | |
234 | .set push; \ | |
235 | .set mips64; \ | |
236 | .set noreorder; \ | |
237 | ___ssnop; \ | |
238 | bnezl $0, .+4; \ | |
239 | ___ssnop; \ | |
240 | .set pop | |
241 | ||
242 | #define __disable_fpu_hazard | |
0b624956 | 243 | |
f52fca97 | 244 | #elif defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6) |
02b849f7 RB |
245 | |
246 | #define __enable_fpu_hazard \ | |
247 | ___ehb | |
248 | ||
249 | #define __disable_fpu_hazard \ | |
250 | ___ehb | |
251 | ||
0b624956 | 252 | #else |
02b849f7 RB |
253 | |
254 | #define __enable_fpu_hazard \ | |
255 | nop; \ | |
256 | nop; \ | |
257 | nop; \ | |
258 | nop | |
259 | ||
260 | #define __disable_fpu_hazard \ | |
261 | ___ehb | |
262 | ||
0b624956 CD |
263 | #endif |
264 | ||
02b849f7 RB |
265 | #ifdef __ASSEMBLY__ |
266 | ||
267 | #define _ssnop ___ssnop | |
268 | #define _ehb ___ehb | |
269 | #define mtc0_tlbw_hazard __mtc0_tlbw_hazard | |
270 | #define tlbw_use_hazard __tlbw_use_hazard | |
271 | #define tlb_probe_hazard __tlb_probe_hazard | |
272 | #define irq_enable_hazard __irq_enable_hazard | |
273 | #define irq_disable_hazard __irq_disable_hazard | |
274 | #define back_to_back_c0_hazard __back_to_back_c0_hazard | |
275 | #define enable_fpu_hazard __enable_fpu_hazard | |
276 | #define disable_fpu_hazard __disable_fpu_hazard | |
277 | ||
278 | #else | |
279 | ||
280 | #define _ssnop() \ | |
281 | do { \ | |
282 | __asm__ __volatile__( \ | |
283 | __stringify(___ssnop) \ | |
284 | ); \ | |
285 | } while (0) | |
286 | ||
287 | #define _ehb() \ | |
288 | do { \ | |
289 | __asm__ __volatile__( \ | |
290 | __stringify(___ehb) \ | |
291 | ); \ | |
292 | } while (0) | |
293 | ||
294 | ||
295 | #define mtc0_tlbw_hazard() \ | |
296 | do { \ | |
297 | __asm__ __volatile__( \ | |
298 | __stringify(__mtc0_tlbw_hazard) \ | |
299 | ); \ | |
300 | } while (0) | |
301 | ||
302 | ||
303 | #define tlbw_use_hazard() \ | |
304 | do { \ | |
305 | __asm__ __volatile__( \ | |
306 | __stringify(__tlbw_use_hazard) \ | |
307 | ); \ | |
308 | } while (0) | |
309 | ||
310 | ||
311 | #define tlb_probe_hazard() \ | |
312 | do { \ | |
313 | __asm__ __volatile__( \ | |
314 | __stringify(__tlb_probe_hazard) \ | |
315 | ); \ | |
316 | } while (0) | |
317 | ||
318 | ||
319 | #define irq_enable_hazard() \ | |
320 | do { \ | |
321 | __asm__ __volatile__( \ | |
322 | __stringify(__irq_enable_hazard) \ | |
323 | ); \ | |
324 | } while (0) | |
325 | ||
326 | ||
327 | #define irq_disable_hazard() \ | |
328 | do { \ | |
329 | __asm__ __volatile__( \ | |
330 | __stringify(__irq_disable_hazard) \ | |
331 | ); \ | |
332 | } while (0) | |
333 | ||
334 | ||
335 | #define back_to_back_c0_hazard() \ | |
336 | do { \ | |
337 | __asm__ __volatile__( \ | |
338 | __stringify(__back_to_back_c0_hazard) \ | |
339 | ); \ | |
340 | } while (0) | |
341 | ||
342 | ||
343 | #define enable_fpu_hazard() \ | |
344 | do { \ | |
345 | __asm__ __volatile__( \ | |
346 | __stringify(__enable_fpu_hazard) \ | |
347 | ); \ | |
348 | } while (0) | |
349 | ||
350 | ||
351 | #define disable_fpu_hazard() \ | |
352 | do { \ | |
353 | __asm__ __volatile__( \ | |
354 | __stringify(__disable_fpu_hazard) \ | |
355 | ); \ | |
356 | } while (0) | |
357 | ||
358 | /* | |
359 | * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine. | |
360 | */ | |
361 | extern void mips_ihb(void); | |
362 | ||
363 | #endif /* __ASSEMBLY__ */ | |
364 | ||
1da177e4 | 365 | #endif /* _ASM_HAZARDS_H */ |