Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * This file is subject to the terms and conditions of the GNU General Public | |
3 | * License. See the file "COPYING" in the main directory of this archive | |
4 | * for more details. | |
5 | * | |
98de920a | 6 | * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org> |
a3c4946d RB |
7 | * Copyright (C) MIPS Technologies, Inc. |
8 | * written by Ralf Baechle <ralf@linux-mips.org> | |
1da177e4 LT |
9 | */ |
10 | #ifndef _ASM_HAZARDS_H | |
11 | #define _ASM_HAZARDS_H | |
12 | ||
36396f3c | 13 | #ifdef __ASSEMBLY__ |
d7d86aa8 | 14 | #define ASMMACRO(name, code...) .macro name; code; .endm |
1da177e4 LT |
15 | #else |
16 | ||
572afc24 RB |
17 | #include <asm/cpu-features.h> |
18 | ||
d7d86aa8 RB |
19 | #define ASMMACRO(name, code...) \ |
20 | __asm__(".macro " #name "; " #code "; .endm"); \ | |
21 | \ | |
22 | static inline void name(void) \ | |
23 | { \ | |
24 | __asm__ __volatile__ (#name); \ | |
25 | } | |
1da177e4 | 26 | |
98de920a | 27 | /* |
70342287 | 28 | * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine. |
98de920a RB |
29 | */ |
30 | extern void mips_ihb(void); | |
31 | ||
1da177e4 LT |
32 | #endif |
33 | ||
d7d86aa8 RB |
34 | ASMMACRO(_ssnop, |
35 | sll $0, $0, 1 | |
36 | ) | |
37 | ||
38 | ASMMACRO(_ehb, | |
39 | sll $0, $0, 3 | |
40 | ) | |
41 | ||
1da177e4 | 42 | /* |
d7d86aa8 | 43 | * TLB hazards |
1da177e4 | 44 | */ |
bd6d85c2 | 45 | #if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON) |
1da177e4 | 46 | |
1da177e4 | 47 | /* |
d7d86aa8 | 48 | * MIPSR2 defines ehb for hazard avoidance |
1da177e4 LT |
49 | */ |
50 | ||
d7d86aa8 RB |
51 | ASMMACRO(mtc0_tlbw_hazard, |
52 | _ehb | |
53 | ) | |
54 | ASMMACRO(tlbw_use_hazard, | |
55 | _ehb | |
56 | ) | |
57 | ASMMACRO(tlb_probe_hazard, | |
58 | _ehb | |
59 | ) | |
60 | ASMMACRO(irq_enable_hazard, | |
7605b390 | 61 | _ehb |
d7d86aa8 RB |
62 | ) |
63 | ASMMACRO(irq_disable_hazard, | |
1da177e4 | 64 | _ehb |
d7d86aa8 RB |
65 | ) |
66 | ASMMACRO(back_to_back_c0_hazard, | |
67 | _ehb | |
68 | ) | |
1da177e4 | 69 | /* |
d7d86aa8 | 70 | * gcc has a tradition of miscompiling the previous construct using the
70342287 | 71 | * address of a label as argument to inline assembler. Gas otoh has the |
d7d86aa8 RB |
72 | * annoying difference between la and dla which are only usable for 32-bit |
73 | * resp. 64-bit code, so can't be used without conditional compilation. | |
74 | * The alternative is switching the assembler to 64-bit code which happens | |
75 | * to work right even for 32-bit code ... | |
1da177e4 | 76 | */ |
d7d86aa8 RB |
77 | #define instruction_hazard() \ |
78 | do { \ | |
79 | unsigned long tmp; \ | |
80 | \ | |
81 | __asm__ __volatile__( \ | |
82 | " .set mips64r2 \n" \ | |
83 | " dla %0, 1f \n" \ | |
84 | " jr.hb %0 \n" \ | |
85 | " .set mips0 \n" \ | |
86 | "1: \n" \ | |
87 | : "=r" (tmp)); \ | |
88 | } while (0) | |
1da177e4 | 89 | |
1c7c4451 KC |
90 | #elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \ |
91 | defined(CONFIG_CPU_BMIPS) | |
572afc24 RB |
92 | |
93 | /* | |
94 | * These are slightly complicated by the fact that we guarantee R1 kernels to | |
95 | * run fine on R2 processors. | |
96 | */ | |
97 | ASMMACRO(mtc0_tlbw_hazard, | |
98 | _ssnop; _ssnop; _ehb | |
99 | ) | |
100 | ASMMACRO(tlbw_use_hazard, | |
101 | _ssnop; _ssnop; _ssnop; _ehb | |
102 | ) | |
103 | ASMMACRO(tlb_probe_hazard, | |
104 | _ssnop; _ssnop; _ssnop; _ehb | |
105 | ) | |
106 | ASMMACRO(irq_enable_hazard, | |
107 | _ssnop; _ssnop; _ssnop; _ehb | |
108 | ) | |
109 | ASMMACRO(irq_disable_hazard, | |
110 | _ssnop; _ssnop; _ssnop; _ehb | |
111 | ) | |
112 | ASMMACRO(back_to_back_c0_hazard, | |
113 | _ssnop; _ssnop; _ssnop; _ehb | |
114 | ) | |
115 | /* | |
116 | * gcc has a tradition of miscompiling the previous construct using the | |
70342287 | 117 | * address of a label as argument to inline assembler. Gas otoh has the |
572afc24 RB |
118 | * annoying difference between la and dla which are only usable for 32-bit |
119 | * resp. 64-bit code, so can't be used without conditional compilation. | |
120 | * The alternative is switching the assembler to 64-bit code which happens | |
121 | * to work right even for 32-bit code ... | |
122 | */ | |
123 | #define __instruction_hazard() \ | |
124 | do { \ | |
125 | unsigned long tmp; \ | |
126 | \ | |
127 | __asm__ __volatile__( \ | |
128 | " .set mips64r2 \n" \ | |
129 | " dla %0, 1f \n" \ | |
130 | " jr.hb %0 \n" \ | |
131 | " .set mips0 \n" \ | |
132 | "1: \n" \ | |
133 | : "=r" (tmp)); \ | |
134 | } while (0) | |
135 | ||
136 | #define instruction_hazard() \ | |
137 | do { \ | |
138 | if (cpu_has_mips_r2) \ | |
139 | __instruction_hazard(); \ | |
140 | } while (0) | |
141 | ||
42a4f17d | 142 | #elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \ |
15fb0a15 KC |
143 | defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \ |
144 | defined(CONFIG_CPU_R5500) | |
1da177e4 LT |
145 | |
146 | /* | |
d7d86aa8 | 147 | * R10000 rocks - all hazards handled in hardware, so this becomes a no-brainer.
1da177e4 | 148 | */ |
1da177e4 | 149 | |
d7d86aa8 RB |
150 | ASMMACRO(mtc0_tlbw_hazard, |
151 | ) | |
152 | ASMMACRO(tlbw_use_hazard, | |
153 | ) | |
154 | ASMMACRO(tlb_probe_hazard, | |
155 | ) | |
156 | ASMMACRO(irq_enable_hazard, | |
157 | ) | |
158 | ASMMACRO(irq_disable_hazard, | |
159 | ) | |
160 | ASMMACRO(back_to_back_c0_hazard, | |
161 | ) | |
162 | #define instruction_hazard() do { } while (0) | |
1da177e4 | 163 | |
d7d86aa8 | 164 | #elif defined(CONFIG_CPU_SB1) |
1da177e4 LT |
165 | |
166 | /* | |
d7d86aa8 | 167 | * Mostly like R4000 for historic reasons |
1da177e4 | 168 | */ |
d7d86aa8 RB |
169 | ASMMACRO(mtc0_tlbw_hazard, |
170 | ) | |
171 | ASMMACRO(tlbw_use_hazard, | |
172 | ) | |
173 | ASMMACRO(tlb_probe_hazard, | |
174 | ) | |
175 | ASMMACRO(irq_enable_hazard, | |
176 | ) | |
177 | ASMMACRO(irq_disable_hazard, | |
178 | _ssnop; _ssnop; _ssnop | |
179 | ) | |
180 | ASMMACRO(back_to_back_c0_hazard, | |
181 | ) | |
182 | #define instruction_hazard() do { } while (0) | |
5068debf | 183 | |
1da177e4 LT |
184 | #else |
185 | ||
186 | /* | |
d7d86aa8 RB |
187 | * Finally the catchall case for all other processors including R4000, R4400, |
188 | * R4600, R4700, R5000, RM7000, NEC VR41xx etc. | |
a3c4946d | 189 | * |
d7d86aa8 RB |
190 | * The taken branch will result in a two cycle penalty for the two killed |
191 | * instructions on R4000 / R4400. Other processors only have a single cycle | |
192 | * hazard so this is nice trick to have an optimal code for a range of | |
193 | * processors. | |
7043ad4f | 194 | */ |
d7d86aa8 | 195 | ASMMACRO(mtc0_tlbw_hazard, |
3f318370 | 196 | nop; nop |
d7d86aa8 RB |
197 | ) |
198 | ASMMACRO(tlbw_use_hazard, | |
199 | nop; nop; nop | |
200 | ) | |
201 | ASMMACRO(tlb_probe_hazard, | |
202 | nop; nop; nop | |
203 | ) | |
204 | ASMMACRO(irq_enable_hazard, | |
7b0fdaa6 | 205 | _ssnop; _ssnop; _ssnop; |
d7d86aa8 RB |
206 | ) |
207 | ASMMACRO(irq_disable_hazard, | |
208 | nop; nop; nop | |
209 | ) | |
210 | ASMMACRO(back_to_back_c0_hazard, | |
211 | _ssnop; _ssnop; _ssnop; | |
212 | ) | |
cc61c1fe | 213 | #define instruction_hazard() do { } while (0) |
41c594ab | 214 | |
d7d86aa8 | 215 | #endif |
1da177e4 | 216 | |
0b624956 CD |
217 | |
218 | /* FPU hazards */ | |
219 | ||
220 | #if defined(CONFIG_CPU_SB1) | |
221 | ASMMACRO(enable_fpu_hazard, | |
222 | .set push; | |
223 | .set mips64; | |
224 | .set noreorder; | |
225 | _ssnop; | |
21a151d8 | 226 | bnezl $0, .+4; |
a1b53a7b | 227 | _ssnop; |
0b624956 CD |
228 | .set pop |
229 | ) | |
230 | ASMMACRO(disable_fpu_hazard, | |
231 | ) | |
232 | ||
233 | #elif defined(CONFIG_CPU_MIPSR2) | |
234 | ASMMACRO(enable_fpu_hazard, | |
235 | _ehb | |
236 | ) | |
237 | ASMMACRO(disable_fpu_hazard, | |
238 | _ehb | |
239 | ) | |
240 | #else | |
241 | ASMMACRO(enable_fpu_hazard, | |
242 | nop; nop; nop; nop | |
243 | ) | |
244 | ASMMACRO(disable_fpu_hazard, | |
245 | _ehb | |
246 | ) | |
247 | #endif | |
248 | ||
1da177e4 | 249 | #endif /* _ASM_HAZARDS_H */ |