/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>
#include <asm/asm-offsets.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull		lsr
#define lspush		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull		lsl
#define lspush		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif

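/*
 * Usage sketch (illustrative, not part of the original header): pull a
 * given byte out of a register, or merge two misaligned words the way
 * the copy routines do. Register choices here are arbitrary.
 *
 *	mov	r3, r2, get_byte_1	@ r3 = byte 1 of r2, either endianness
 *	mov	r3, r4, lspull #8	@ low part of a misaligned word ...
 *	orr	r3, r3, r5, lspush #24	@ ... merged with bits of the next word
 */
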
/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif

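/*
 * Illustrative use (register arbitrary): byte-reverse a value only
 * when the kernel runs BE8; the line assembles to nothing otherwise.
 *
 * ARM_BE8(rev	r0, r0)
 */
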
/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif

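/*
 * Illustrative use, in the style of the copy loops: prefetch the
 * source buffer some bytes ahead; pre-v5 builds drop the line.
 *
 * PLD(	pld	[r1, #32]	)
 */
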
/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this to be a worthwhile thing to do when the cache
 * is not set to write-allocate (this would need further testing on
 * XScale when WA is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif

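/*
 * Illustrative fragment (registers assumed as in the copy templates,
 * where r0 is the destination pointer): compute the bytes needed to
 * reach the next 32-byte boundary, with every line vanishing on
 * non-Feroceon builds.
 *
 * CALGN(	ands	ip, r0, #31	)
 * CALGN(	rsb	ip, ip, #32	)
 */
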
/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb	sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * The registers should really be pushed and popped conditionally,
	 * but after the bl the flags are certainly clobbered anyway.
	 */
	stmdb	sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
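/*
 * Illustrative pairing: the plain macros keep the irq-tracing hooks
 * informed; the *_notrace variants are for paths where calling into
 * the tracer is itself unsafe.
 *
 *	disable_irq
 *	@ ... work that must run with IRQs masked ...
 *	enable_irq
 */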
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm

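/*
 * Illustrative pairing (r9 is an arbitrary scratch register): save
 * the IRQ state, do the work, then restore it; the trace hook fires
 * only if IRQs were actually enabled beforehand.
 *
 *	save_and_disable_irqs r9
 *	@ ... IRQ-unsafe work ...
 *	restore_irqs r9
 */
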
/*
 * Get current thread_info.
 */
	.macro	get_thread_info, rd
 ARM(	mov	\rd, sp, lsr #13	)
 THUMB(	mov	\rd, sp			)
 THUMB(	lsr	\rd, \rd, #13		)
	mov	\rd, \rd, lsl #13
	.endm

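/*
 * Illustrative use: thread_info sits at the base of the 8K kernel
 * stack, so masking sp locates it; TI_FLAGS comes from asm-offsets.h.
 *
 *	get_thread_info r9
 *	ldr	r1, [r9, #TI_FLAGS]
 */
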
/*
 * Increment/decrement the preempt count.
 */
#ifdef CONFIG_PREEMPT_COUNT
	.macro	inc_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	add	\tmp, \tmp, #1			@ increment it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	sub	\tmp, \tmp, #1			@ decrement it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	get_thread_info \ti
	dec_preempt_count \ti, \tmp
	.endm
#else
	.macro	inc_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	.endm
#endif

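/*
 * Illustrative pairing (register choice mirrors the VFP entry code):
 * both macros compile away when CONFIG_PREEMPT_COUNT is not set.
 *
 *	get_thread_info r10
 *	inc_preempt_count r10, r4
 *	@ ... non-preemptible work ...
 *	dec_preempt_count r10, r4
 */
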
#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection

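/*
 * Illustrative use: the access is recorded in __ex_table so that a
 * fault resumes execution at the local label 9001, which the caller
 * must provide.
 *
 * USER(	ldrt	r3, [r0]	)	@ may fault
 *	...
 * 9001:	@ fixup, reached only if the ldrt above faulted
 */
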
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)						\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif

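/*
 * Illustrative pairing: emit the SMP instruction inline and record a
 * one-word UP replacement that the boot code patches in when running
 * on a uniprocessor; see the W() note above for Thumb-2.
 *
 *	ALT_SMP(dmb	ish)
 *	ALT_UP(nop)
 */
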
/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm

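/*
 * Illustrative use: synchronize the instruction stream after, e.g.,
 * writing a CP15 system control register.
 *
 *	mcr	p15, 0, r0, c1, c0, 0	@ update SCTLR
 *	instr_sync
 */
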
/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm

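/*
 * Illustrative use (as in the bitops helpers, which pass the "arm"
 * mode): full barriers around an exclusive-access sequence, compiled
 * out entirely on !SMP kernels.
 *
 *	smp_dmb	arm
 *	@ ... ldrex/strex loop ...
 *	smp_dmb	arm
 */
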
#if defined(CONFIG_CPU_V7M)
/*
 * setmode is used to assert that the CPU is in SVC mode during boot.
 * For v7-M this is done in __v7m_setup, so setmode can be empty here.
 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif

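/*
 * Illustrative use from the boot path (r9 is a scratch register):
 *
 *	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, r9
 */
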
/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot
 * time; you cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f
	orr	\reg, \reg, #PSR_A_BIT
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm

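/*
 * Illustrative use: called early in the boot path with any free
 * register, e.g.
 *
 *	safe_svcmode_maskall r0
 */
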
/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

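/*
 * Illustrative use: load one word from the user pointer in r1 with a
 * post-increment of 4, branching to the default 9001 fixup label on a
 * fault (the caller must define it).
 *
 *	ldrusr	r3, r1, 4
 *	str	r3, [r0], #4
 */
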
/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm

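/*
 * Illustrative use (cf. the proc-*.S files):
 *
 *	string	cpu_v7_name, "ARMv7 Processor"
 */
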
	.macro	check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1
	sbcccs	\tmp, \tmp, \limit
	bcs	\bad
#endif
	.endm

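/*
 * Illustrative use (as in the put_user helpers): fall through when
 * [\addr, \addr + \size) lies below the limit, branch to the fault
 * label otherwise; ip serves as the scratch register.
 *
 *	check_uaccess r0, 4, r1, ip, __put_user_bad
 */
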
#endif /* __ASM_ASSEMBLER_H__ */