Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * linux/arch/arm/vfp/vfphw.S | |
3 | * | |
4 | * Copyright (C) 2004 ARM Limited. | |
5 | * Written by Deep Blue Solutions Limited. | |
6 | * | |
7 | * This program is free software; you can redistribute it and/or modify | |
8 | * it under the terms of the GNU General Public License version 2 as | |
9 | * published by the Free Software Foundation. | |
10 | * | |
11 | * This code is called from the kernel's undefined instruction trap. | |
12 | * r9 holds the return address for successful handling. | |
13 | * lr holds the return address for unrecognised instructions. | |
14 | * r10 points at the start of the private FP workspace in the thread structure | |
15 | * sp points to a struct pt_regs (as defined in include/asm/proc/ptrace.h) | |
16 | */ | |
17 | #include <asm/thread_info.h> | |
18 | #include <asm/vfpmacros.h> | |
19 | #include "../kernel/entry-header.S" | |
20 | ||
@ DBGSTR - emit a constant diagnostic string through printk when DEBUG is
@ defined; expands to nothing otherwise.  r0-r3, ip and lr are saved and
@ restored around the call, since printk may clobber the caller-saved set.
	.macro	DBGSTR, str
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	add	r0, pc, #4		@ pc reads as . + 8, so r0 points at
					@ the .asciz literal two insns below
	bl	printk
	b	1f			@ hop over the inline string data
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm
32 | ||
@ DBGSTR1 - like DBGSTR, but passes one value (\arg) to printk in r1.
	.macro	DBGSTR1, str, arg
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r1, \arg		@ first (and only) printk argument
	add	r0, pc, #4		@ r0 -> inline format string below
	bl	printk
	b	1f			@ skip the string data
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm
45 | ||
@ DBGSTR3 - like DBGSTR, but passes three values to printk in r1-r3.
	.macro	DBGSTR3, str, arg1, arg2, arg3
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r3, \arg3		@ load in reverse order so that an
	mov	r2, \arg2		@ \arg naming r1 or r2 is consumed
	mov	r1, \arg1		@ before that register is overwritten
	add	r0, pc, #4		@ r0 -> inline format string below
	bl	printk
	b	1f			@ skip the string data
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm
60 | ||
61 | ||
@ VFP hardware support entry point.
@
@ Called from the undefined-instruction trap with:
@  r0  = the faulting VFP instruction
@  r2  = faulting PC + 4
@  r9  = return address on successful handling
@  r10 = vfp_state union for the current thread
@  r11 = CPU number
@  lr  = return address when the instruction is not ours
ENTRY(vfp_support_entry)
	DBGSTR3	"instr %08x pc %08x state %p", r0, r2, r10

	VFPFMRX	r1, FPEXC		@ read FPEXC: is the VFP enabled?
	DBGSTR1	"fpexc %08x", r1
	tst	r1, #FPEXC_EN
	bne	look_for_VFP_exceptions	@ already enabled - must be a bounce

	DBGSTR1	"enable %x", r10
	ldr	r3, last_VFP_context_address
	orr	r1, r1, #FPEXC_EN	@ user-visible FPEXC will have EN set
	ldr	r4, [r3, r11, lsl #2]	@ r4 = this CPU's last_VFP_context
	bic	r5, r1, #FPEXC_EX	@ r5 = FPEXC with exceptions masked
	cmp	r4, r10
	beq	check_for_exception	@ same thread owned the VFP last time:
					@ its registers are still live in the
					@ hardware, and we must not disturb a
					@ pending exception by reloading state

	VFPFMXR	FPEXC, r5		@ turn the VFP on with exceptions
					@ disabled so the rest of the state
					@ can be accessed safely

#ifndef CONFIG_SMP
	@ Lazy switch: push the hardware state out to whichever thread used
	@ the VFP last.  On SMP the save is done eagerly at context-switch
	@ time instead, so this block is not needed there.

	DBGSTR1	"save old state %p", r4
	cmp	r4, #0
	beq	no_old_VFP_process	@ nobody owned the VFP yet
	VFPFSTMIA r4, r5		@ dump the working registers
					@ (advances r4 past them)
	VFPFMRX	r5, FPSCR		@ capture current status
	tst	r1, #FPEXC_EX		@ extra state only exists with EX set
	beq	1f
	VFPFMRX	r6, FPINST		@ FPINST (valid only when FPEXC.EX)
	tst	r1, #FPEXC_FP2V		@ does FPINST2 exist here?
	beq	1f
	VFPFMRX	r8, FPINST2		@ FPINST2 (present and valid)
1:
	stmia	r4, {r1, r5, r6, r8}	@ store FPEXC, FPSCR, FPINST, FPINST2
					@ after the register dump
#endif

no_old_VFP_process:
	DBGSTR1	"load state %p", r10
	str	r10, [r3, r11, lsl #2]	@ we own the VFP now: record it in
					@ this CPU's last_VFP_context slot
	@ Pull the saved state for this thread back into the hardware
	VFPFLDMIA r10, r5		@ reload the working registers while
					@ FPEXC still has exceptions masked
					@ (advances r10 past them)
	ldmia	r10, {r1, r5, r6, r8}	@ fetch FPEXC, FPSCR, FPINST, FPINST2
	tst	r1, #FPEXC_EX		@ extra state only exists with EX set
	beq	1f
	VFPFMXR	FPINST, r6		@ restore FPINST (FPEXC.EX was set)
	tst	r1, #FPEXC_FP2V		@ does FPINST2 exist here?
	beq	1f
	VFPFMXR	FPINST2, r8		@ restore FPINST2 too
1:
	VFPFMXR	FPSCR, r5		@ put the status register back

check_for_exception:
	tst	r1, #FPEXC_EX
	bne	process_exception	@ deal with the pending exception now,
					@ before writing an FPEXC value that
					@ would stop us reading the state back
	VFPFMXR	FPEXC, r1		@ FPEXC goes back last of all
	sub	r2, r2, #4
	str	r2, [sp, #S_PC]		@ rewind the PC: retry the instruction
#ifdef CONFIG_PREEMPT
	get_thread_info r10
	ldr	r4, [r10, #TI_PREEMPT]	@ drop the preempt count taken by the
	sub	r11, r4, #1		@ trap entry path before returning
	str	r11, [r10, #TI_PREEMPT]
#endif
	mov	pc, r9			@ handled - resume the user code


look_for_VFP_exceptions:
	@ VFP was already enabled: look for a synchronous (EX) or
	@ asynchronous (DEX) exception to service
	tst	r1, #FPEXC_EX | FPEXC_DEX
	bne	process_exception
	@ Some VFP subarchitecture 1 implementations bounce every CDP
	@ synchronously when FPSCR.IXE is set, without raising FPEXC.EX,
	@ so check for that case as well
	VFPFMRX	r5, FPSCR
	tst	r5, #FPSCR_IXE
	bne	process_exception

	@ Nothing for us here: the coprocessor instruction was not
	@ recognised by the VFP, so hand it on to the next handler

	DBGSTR	"not VFP"
#ifdef CONFIG_PREEMPT
	get_thread_info r10
	ldr	r4, [r10, #TI_PREEMPT]	@ drop the preempt count taken by the
	sub	r11, r4, #1		@ trap entry path before bailing out
	str	r11, [r10, #TI_PREEMPT]
#endif
	mov	pc, lr

process_exception:
	DBGSTR	"bounce"
	mov	r2, sp			@ regdump sits at the top of the stack
	mov	lr, r9			@ arrange a return to the user code

	@ Hand over to C to package the bounce up for the support code:
	@   r0 = trigger instruction
	@   r1 = FPEXC value
	@   r2 = pointer to the register dump
	b	VFP_bounce		@ tail-call; the support code raises a
					@ signal if needed, otherwise the user
					@ code retries the faulted instruction
ENDPROC(vfp_support_entry)
1da177e4 | 186 | |
@ vfp_save_state - write the live VFP hardware state out to memory.
@  r0 = destination save area
@  r1 = current FPEXC value (tells us how much extra state exists)
@ Layout written: working registers, then FPEXC, FPSCR, FPINST, FPINST2.
ENTRY(vfp_save_state)
	DBGSTR1	"save VFP state %p", r0
	VFPFSTMIA r0, r2		@ dump the working registers
					@ (advances r0 past them)
	VFPFMRX	r2, FPSCR		@ capture current status
	tst	r1, #FPEXC_EX		@ extra state only exists with EX set
	beq	1f
	VFPFMRX	r3, FPINST		@ FPINST (valid only when FPEXC.EX)
	tst	r1, #FPEXC_FP2V		@ does FPINST2 exist here?
	beq	1f
	VFPFMRX	r12, FPINST2		@ FPINST2 (present and valid)
1:
	stmia	r0, {r1, r2, r3, r12}	@ store FPEXC, FPSCR, FPINST, FPINST2
	mov	pc, lr
ENDPROC(vfp_save_state)
c6428464 | 204 | |
1da177e4 LT |
@ Literal pool entry: address of the per-CPU last_VFP_context array,
@ loaded pc-relative by vfp_support_entry above.
last_VFP_context_address:
	.word	last_VFP_context
207 | ||
@ vfp_get_float - read single-precision register s<r0> into r0.
@ Computed jump: each table entry is two insns (8 bytes), so the index
@ scales by 8; pc reads as . + 8, so index 0 lands on the first entry
@ and the "mov r0, r0" is never-executed padding.
ENTRY(vfp_get_float)
	add	pc, pc, r0, lsl #3	@ dispatch on register number in r0
	mov	r0, r0			@ padding slot
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mrc	p10, 0, r0, c\dr, c0, 0	@ fmrs	r0, s0 (even register 2*\dr)
	mov	pc, lr
	mrc	p10, 0, r0, c\dr, c0, 4	@ fmrs	r0, s1 (odd register 2*\dr+1)
	mov	pc, lr
	.endr
ENDPROC(vfp_get_float)
1da177e4 | 218 | |
@ vfp_put_float - write r0 into single-precision register s<r1>.
@ Same 8-byte-per-entry computed jump as vfp_get_float; the index is in
@ r1 here because r0 carries the value to store.
ENTRY(vfp_put_float)
	add	pc, pc, r1, lsl #3	@ dispatch on register number in r1
	mov	r0, r0			@ padding slot; never executed
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mcr	p10, 0, r0, c\dr, c0, 0	@ fmsr	r0, s0 (even register 2*\dr)
	mov	pc, lr
	mcr	p10, 0, r0, c\dr, c0, 4	@ fmsr	r0, s1 (odd register 2*\dr+1)
	mov	pc, lr
	.endr
ENDPROC(vfp_put_float)
1da177e4 | 229 | |
@ vfp_get_double - read double-precision register d<r0> into r0:r1
@ (low word in r0, high word in r1).  8-byte-per-entry computed jump,
@ as in vfp_get_float.  An out-of-range index (16, or 32 with VFPv3)
@ falls through every table and returns zero.
ENTRY(vfp_get_double)
	add	pc, pc, r0, lsl #3	@ dispatch on register number in r0
	mov	r0, r0			@ padding slot; never executed
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmrrd	r0, r1, d\dr
	mov	pc, lr
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mrrc	p11, 3, r0, r1, c\dr	@ fmrrd	r0, r1, d\dr
	mov	pc, lr
	.endr
#endif

	@ virtual register 16 (or 32 if VFPv3) always reads as zero,
	@ used for compares against zero
	mov	r0, #0
	mov	r1, #0
	mov	pc, lr
ENDPROC(vfp_get_double)
1da177e4 | 250 | |
@ vfp_put_double - write r0:r1 into double-precision register d<r2>
@ (low word in r0, high word in r1; register index in r2).
@ 8-byte-per-entry computed jump, as in vfp_put_float.
@
@ BUGFIX: the VFPv3 d16-d31 table previously used
@	mcrr p11, 3, r1, r2, c\dr
@ which stored the HIGH word from r2 - the register *index* - and
@ dropped r0 (the low word) entirely, corrupting any emulated store to
@ d16-d31.  The source operands must match the d0-d15 path: r0 low,
@ r1 high.  (This is the upstream fix "ARM: 5891/1: vfp: fix
@ vfp_put_double() for d16-d31".)
ENTRY(vfp_put_double)
	add	pc, pc, r2, lsl #3	@ dispatch on register number in r2
	mov	r0, r0			@ padding slot; never executed
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmdrr	d\dr, r0, r1		@ d\dr = r1:r0
	mov	pc, lr
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mcrr	p11, 3, r0, r1, c\dr	@ fmdrr	r0, r1, d\dr
	mov	pc, lr
	.endr
#endif
ENDPROC(vfp_put_double)