/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull		lsr
#define lspush		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull		lsl
#define lspush		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif

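/*
 * Example (illustrative, mirroring the copy/string routines in
 * arch/arm/lib): extract byte 1 of the word in r0, regardless of
 * endianness:
 *
 *	mov	r3, r0, get_byte_1
 *	and	r3, r3, #255
 *
 * lspull/lspush pair up (shift amounts summing to 32) to merge two
 * adjacent words when copying from a misaligned source:
 *
 *	mov	r3, r4, lspull #8
 *	orr	r3, r3, r5, lspush #24
 */
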
/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif

/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this to be a worthwhile thing to do when the cache
 * is not set to write-allocate (this would need further testing on
 * XScale when WA is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif

/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb	sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * Ideally the registers would be pushed and popped conditionally,
	 * but the flags are certainly clobbered across the bl, so the
	 * pop cannot use \cond and the unconditional form is used instead.
	 */
	stmdb	sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm
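
/*
 * Note: the "eq" condition makes asm_trace_hardirqs_on_cond fire only
 * when PSR_I_BIT is clear in \oldcpsr, i.e. only when the restore
 * actually re-enables IRQs.
 */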

/*
 * Get current thread_info.
 */
	.macro	get_thread_info, rd
 ARM(	mov	\rd, sp, lsr #13	)
 THUMB(	mov	\rd, sp			)
 THUMB(	lsr	\rd, \rd, #13		)
	mov	\rd, \rd, lsl #13
	.endm

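/*
 * Illustrative note, assuming the usual 8KB ARM kernel stacks
 * (THREAD_SIZE == 8192 == 1 << 13): clearing the low 13 bits of sp
 * yields the base of the current stack, where struct thread_info
 * lives. E.g. sp == 0xc7a09e58 gives \rd == 0xc7a08000.
 */
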
#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection

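/*
 * Example (illustrative sketch): wrap a user-space access so that a
 * fault continues at the caller-provided 9001 label via the exception
 * table:
 *
 *	USER(	ldrt	r3, [r0]	)	@ may fault
 *	...
 * 9001:	@ fixup code, reached on a faulting access
 */
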
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)						\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif

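/*
 * Example (illustrative): the two are paired so that a CONFIG_SMP
 * kernel which finds itself booting on a uniprocessor system can be
 * patched at boot, using the records emitted into .alt.smp.init:
 *
 *	ALT_SMP(dmb	ish)
 *	ALT_UP(nop)
 */
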
/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm

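/*
 * Note: \mode only selects between the plain and the wide W()
 * encodings; the ALT_SMP() and ALT_UP() instructions must assemble to
 * the same width for the boot-time patching to be safe in Thumb-2.
 */
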
#if defined(CONFIG_CPU_V7M)
/*
 * setmode is used to assert that the CPU is in SVC mode during boot.
 * For v7-M this is done in __v7m_setup, so setmode can be empty here.
 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time;
 * you cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f
	orr	\reg, \reg, #PSR_A_BIT
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
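
/*
 * Explanatory note on the mode test above: eor with HYP_MODE zeroes the
 * mode field of \reg exactly when the CPU is in HYP mode, so the tst
 * against MODE_MASK sets Z in that case only. The bne path (not HYP)
 * can switch with a plain msr; the HYP case has to leave via an
 * exception return (__MSR_ELR_HYP / __ERET) to drop into SVC mode.
 */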

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm
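
/*
 * Example (illustrative): the copy_{from,to}_user templates use these
 * as, e.g.:
 *
 *	ldrusr	r3, r0, 4		@ load a word from user pointer r0,
 *					@ post-increment, abort to 9001f
 */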

/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm

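/*
 * Example: the proc-*.S files use this to emit named, sized string
 * objects, e.g.:
 *
 *	string	cpu_elf_name, "v7"
 */
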
	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1
	sbcccs	\tmp, \tmp, \limit
	bcs	\bad
#endif
	.endm
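
/*
 * Explanatory note: adds leaves C set if \addr + \size - 1 wraps around
 * zero. When it does not wrap (cc), sbcccs computes \tmp - \limit - 1,
 * setting C exactly when the last byte of the range lies above \limit.
 * Either way, C set means an invalid range, and bcs takes the \bad exit.
 */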

#endif /* __ASM_ASSEMBLER_H__ */