/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/assembler.h>
#include <asm/ftrace.h>
#include <asm/unwind.h>
#include <asm/export.h>

#include "entry-header.S"

/*
 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 * start of every function. In mcount, apart from the function's address (in
 * lr), we need to get hold of the function's caller's address.
 *
 * Older GCCs (pre-4.4) inserted a call to a routine called mcount like this:
 *
 *	bl	mcount
 *
 * These versions have the limitation that in order for the mcount routine to
 * be able to determine the function's caller's address, an APCS-style frame
 * pointer (which is set up with something like the code below) is required.
 *
 *	mov	ip, sp
 *	push	{fp, ip, lr, pc}
 *	sub	fp, ip, #4
 *
 * With EABI, these frame pointers are not available unless -mapcs-frame is
 * specified, and if building as Thumb-2, not even then.
 *
 * Newer GCCs (4.4+) solve this problem by introducing a new version of mcount,
 * with call sites like:
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * With these compilers, frame pointers are not necessary.
 *
 * mcount can be thought of as a function called in the middle of a subroutine
 * call. As such, it needs to be transparent for both the caller and the
 * callee: the original lr needs to be restored when leaving mcount, and no
 * registers should be clobbered. (In the __gnu_mcount_nc implementation, we
 * clobber the ip register. This is OK because the ARM calling convention
 * allows it to be clobbered in subroutines and doesn't use it to hold
 * parameters.)
 *
 * When using dynamic ftrace, we patch out the mcount call by a "mov r0, r0"
 * for the mcount case, and a "pop {lr}" for the __gnu_mcount_nc case (see
 * arch/arm/kernel/ftrace.c).
 */
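/*
 * For illustration only (a sketch of the effect described above; the actual
 * patching lives in arch/arm/kernel/ftrace.c): with dynamic ftrace a
 * __gnu_mcount_nc call site
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * is turned into the effective no-op
 *
 *	push	{lr}
 *	pop	{lr}
 *
 * and only rewritten into a call to ftrace_caller (below) while tracing is
 * enabled.
 */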

#ifndef CONFIG_OLD_MCOUNT
#if (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 4))
#error Ftrace requires CONFIG_FRAME_POINTER=y with GCC older than 4.4.0.
#endif
#endif

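/*
 * Turn a raw lr value into the address ftrace expects for the instrumented
 * function: clear the Thumb bit and step back over the mcount call
 * (MCOUNT_INSN_SIZE bytes).
 */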
.macro mcount_adjust_addr rd, rn
	bic	\rd, \rn, #1		@ clear the Thumb bit if present
	sub	\rd, \rd, #MCOUNT_INSN_SIZE
.endm

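/*
 * Common body for the non-dynamic entry points.  If ftrace_trace_function
 * still points at ftrace_stub (and, with CONFIG_FUNCTION_GRAPH_TRACER, no
 * graph hooks are registered either), return as cheaply as possible.
 * Otherwise call the registered tracer with the instrumented function's
 * adjusted address in r0 and its caller's lr in r1.
 */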
.macro __mcount suffix
	mcount_enter
	ldr	r0, =ftrace_trace_function
	ldr	r2, [r0]
	adr	r0, .Lftrace_stub
	cmp	r0, r2
	bne	1f

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	ldr	r1, =ftrace_graph_return
	ldr	r2, [r1]
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix

	ldr	r1, =ftrace_graph_entry
	ldr	r2, [r1]
	ldr	r0, =ftrace_graph_entry_stub
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix
#endif

	mcount_exit

1:	mcount_get_lr	r1		@ lr of instrumented func
	mcount_adjust_addr	r0, lr	@ instrumented function
	badr	lr, 2f
	mov	pc, r2
2:	mcount_exit
.endm

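/*
 * Common body for the CONFIG_DYNAMIC_FTRACE entry points.  The instructions
 * at ftrace_call\suffix and ftrace_graph_call\suffix are placeholders that
 * arch/arm/kernel/ftrace.c rewrites at runtime: the former to call the
 * active tracer, the latter (a "mov r0, r0" no-op) to branch to
 * ftrace_graph_caller\suffix when the graph tracer is in use.
 */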
.macro __ftrace_caller suffix
	mcount_enter

	mcount_get_lr	r1		@ lr of instrumented func
	mcount_adjust_addr	r0, lr	@ instrumented function

	.globl ftrace_call\suffix
ftrace_call\suffix:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call\suffix
ftrace_graph_call\suffix:
	mov	r0, r0
#endif

	mcount_exit
.endm

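/*
 * Hand over to the function graph tracer: build the arguments for
 * prepare_ftrace_return(), i.e. a pointer to the instrumented routine's
 * saved lr (its parent's return address, at fp - 4), the routine's adjusted
 * address, and the frame pointer, so the return can be diverted through
 * return_to_handler below.
 */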
.macro __ftrace_graph_caller
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#ifdef CONFIG_DYNAMIC_FTRACE
	@ called from __ftrace_caller, saved in mcount_enter
	ldr	r1, [sp, #16]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1
#else
	@ called from __mcount, untouched in lr
	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
#endif
	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return
	mcount_exit
.endm

#ifdef CONFIG_OLD_MCOUNT
/*
 * mcount
 */
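/*
 * With the old ABI the instrumented function has already built an APCS
 * frame, so its caller's lr can be found at [fp, #-4].  mcount_exit reloads
 * lr from there because the "bl mcount" at the call site clobbered it.
 */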

.macro mcount_enter
	stmdb	sp!, {r0-r3, lr}
.endm

.macro mcount_get_lr reg
	ldr	\reg, [fp, #-4]
.endm

.macro mcount_exit
	ldr	lr, [fp, #-4]
	ldmia	sp!, {r0-r3, pc}
.endm

ENTRY(mcount)
#ifdef CONFIG_DYNAMIC_FTRACE
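	/*
	 * With dynamic ftrace the call sites are patched (see
	 * arch/arm/kernel/ftrace.c); this default body just undoes the
	 * "bl mcount" by restoring lr from the APCS frame and returns
	 * without tracing.
	 */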
	stmdb	sp!, {lr}
	ldr	lr, [fp, #-4]
	ldmia	sp!, {pc}
#else
	__mcount _old
#endif
ENDPROC(mcount)
EXPORT_SYMBOL(mcount)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller_old)
	__ftrace_caller _old
ENDPROC(ftrace_caller_old)
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller_old)
	__ftrace_graph_caller
ENDPROC(ftrace_graph_caller_old)
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit
#endif

/*
 * __gnu_mcount_nc
 */
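/*
 * Here the call site has already pushed the instrumented function's caller's
 * lr (see the header comment), so mcount_enter stacks {r0-r3, lr} on top of
 * it: [sp, #20] is the caller's lr and [sp, #16] the return address into the
 * instrumented function.  mcount_exit pops the latter into ip and returns
 * through it, restoring lr from the slot the call site pushed.
 */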

.macro mcount_enter
/*
 * This pad compensates for the push {lr} at the call site. Note that we are
 * unable to unwind through a function which does not otherwise save its lr.
 */
UNWIND(.pad #4)
	stmdb	sp!, {r0-r3, lr}
UNWIND(.save {r0-r3, lr})
.endm

.macro mcount_get_lr reg
	ldr	\reg, [sp, #20]
.endm

.macro mcount_exit
	ldmia	sp!, {r0-r3, ip, lr}
	ret	ip
.endm

ENTRY(__gnu_mcount_nc)
UNWIND(.fnstart)
#ifdef CONFIG_DYNAMIC_FTRACE
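	/*
	 * Call sites start out pointing here and are patched to "pop {lr}"
	 * once ftrace has initialised; until then just pop the call site's
	 * pushed value back into lr and return without tracing.
	 */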
	mov	ip, lr
	ldmia	sp!, {lr}
	ret	ip
#else
	__mcount
#endif
UNWIND(.fnend)
ENDPROC(__gnu_mcount_nc)
EXPORT_SYMBOL(__gnu_mcount_nc)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller)
UNWIND(.fnstart)
	__ftrace_caller
UNWIND(.fnend)
ENDPROC(ftrace_caller)
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
UNWIND(.fnstart)
	__ftrace_graph_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_caller)
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
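/*
 * The graph tracer made the instrumented function "return" here instead of
 * to its real caller.  ftrace_return_to_handler() gives us back the original
 * return address; jump to it with r0-r3 (potential return values) preserved.
 */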
.globl return_to_handler
return_to_handler:
	stmdb	sp!, {r0-r3}
	mov	r0, fp			@ frame pointer
	bl	ftrace_return_to_handler
	mov	lr, r0			@ r0 has real ret addr
	ldmia	sp!, {r0-r3}
	ret	lr
#endif

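/*
 * Default target both for ftrace_trace_function and for the ftrace_call
 * site above when no tracer is registered: simply return.
 */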
ENTRY(ftrace_stub)
.Lftrace_stub:
	ret	lr
ENDPROC(ftrace_stub)