ftrace/x86: Rename MCOUNT_SAVE_FRAME and add more detailed comments
arch/x86/kernel/mcount_64.S
/*
 * linux/arch/x86_64/mcount_64.S
 *
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


	.code64
	.section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
#else
# define function_hook	mcount
#endif

/*
 * gcc's -pg option adds a call to 'mcount' in most functions.
 * When -mfentry is used, the call is to '__fentry__' and not 'mcount',
 * and is done before the function's stack frame is set up.
 * Both require a set of regs to be saved before calling
 * any C code and restored before returning back to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, a pt_regs sized area is allocated on the
 * stack and the required mcount registers are saved in the slots
 * where pt_regs keeps them.
 */

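/*
 * For illustration only (not emitted by this file): with plain -pg the
 * compiler inserts the call after the frame is built,
 *
 *	push %rbp
 *	mov  %rsp, %rbp
 *	call mcount
 *
 * while -pg -mfentry makes "call __fentry__" the very first
 * instruction of the function, before any frame setup.
 */
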
/* skip is set if the stack was already partially adjusted */
.macro save_mcount_regs skip=0
	/*
	 * We add enough stack to save all regs.
	 */
	subq $(SS+8-\skip), %rsp
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/* Move RIP to its proper location */
	movq SS+8(%rsp), %rdx
	movq %rdx, RIP(%rsp)
.endm

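/*
 * Layout note: SS is the offset of the last pt_regs slot, so SS+8 is
 * sizeof(pt_regs). After the subq in save_mcount_regs, the return
 * address of the mcount/fentry call (the traced function's ip) sits
 * just above the pt_regs area at SS+8(%rsp), which is why it is
 * copied into the RIP slot.
 */
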
.macro restore_mcount_regs skip=0
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax
	addq $(SS+8-\skip), %rsp
.endm

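/*
 * The restore order mirrors save_mcount_regs, and a caller must pass
 * the same skip value to both macros, or the final addq will leave
 * %rsp off by that amount.
 */
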
/* skip is set if stack has been adjusted */
.macro ftrace_caller_setup trace_label skip=0
	save_mcount_regs \skip

	/* Save this location */
GLOBAL(\trace_label)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Load ip into the first parameter */
	movq RIP(%rsp), %rdi
	subq $MCOUNT_INSN_SIZE, %rdi
	/* Load the parent_ip into the second parameter */
#ifdef CC_USING_FENTRY
	movq SS+16(%rsp), %rsi
#else
	movq 8(%rbp), %rsi
#endif
.endm

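/*
 * After ftrace_caller_setup, the C callback arguments are in place:
 * %rdi = address of the mcount/fentry call instruction in the traced
 * function (hence the MCOUNT_INSN_SIZE adjustment), %rsi = parent ip,
 * %rdx = the ftrace_ops pointer. %rcx (pt_regs) is set by the callers.
 */
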
#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(function_hook)
	retq
END(function_hook)

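/*
 * With DYNAMIC_FTRACE the mcount/fentry call sites themselves are
 * patched at runtime, so the default hook above only needs to return
 * immediately.
 */
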
#ifdef CONFIG_FRAME_POINTER
/*
 * Stack traces will stop at the ftrace trampoline if the frame pointer
 * is not set up properly. If fentry is used, we need to save a frame
 * pointer for the parent as well as the function traced, because the
 * fentry is called before the stack frame is set up, whereas mcount
 * is called afterward.
 */
.macro create_frame parent rip
#ifdef CC_USING_FENTRY
	pushq \parent
	pushq %rbp
	movq %rsp, %rbp
#endif
	pushq \rip
	pushq %rbp
	movq %rsp, %rbp
.endm

.macro restore_frame
#ifdef CC_USING_FENTRY
	addq $16, %rsp
#endif
	popq %rbp
	addq $8, %rsp
.endm
#else
.macro create_frame parent rip
.endm
.macro restore_frame
.endm
#endif /* CONFIG_FRAME_POINTER */

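/*
 * For reference, with CC_USING_FENTRY create_frame leaves the stack
 * looking like this (higher addresses first), so an unwinder sees
 * both the traced function and its caller:
 *
 *	\parent		(fake return address: the caller)
 *	saved %rbp
 *	\rip		(fake return address: the traced function)
 *	saved %rbp	<- new %rbp
 */
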
ENTRY(ftrace_caller)
	ftrace_caller_setup ftrace_caller_op_ptr
	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

	create_frame %rsi, %rdi

GLOBAL(ftrace_call)
	call ftrace_stub

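/*
 * ftrace_call above is the site that ftrace patches at runtime: the
 * call is redirected from ftrace_stub to the registered callback.
 */
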
	restore_frame

	restore_mcount_regs

	/*
	 * The copied trampoline must call ftrace_return as it
	 * still may need to call the function graph tracer.
	 */
GLOBAL(ftrace_caller_end)

GLOBAL(ftrace_return)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

GLOBAL(ftrace_stub)
	retq
END(ftrace_caller)

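/*
 * ftrace_regs_caller does the same job as ftrace_caller but fills in
 * a complete pt_regs, so callbacks such as kprobes may inspect and
 * modify any register, including the saved RIP.
 */
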
ENTRY(ftrace_regs_caller)
	/* Save the current flags before compare (in SS location) */
	pushfq

	/* skip=8 to skip flags saved in SS */
	ftrace_caller_setup ftrace_regs_caller_op_ptr 8

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbp, RBP(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq SS(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address */
	leaq SS+16(%rsp), %rcx
	movq %rcx, RSP(%rsp)

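/*
 * Note: SS+16(%rsp) above skips the saved flags and the trampoline's
 * return address, so pt_regs->sp holds the traced function's stack
 * pointer as it was just before the fentry/mcount call.
 */
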
	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

	create_frame %rsi, %rdi

GLOBAL(ftrace_regs_call)
	call ftrace_stub

	restore_frame

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, SS(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, SS+8(%rsp)

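/*
 * RIP was just copied into the return-address slot, so once the
 * registers are restored the final return lands wherever the callback
 * left pt_regs->ip; this is how kprobes can redirect execution.
 */
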
	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBP(%rsp), %rbp
	movq RBX(%rsp), %rbx

	/* skip=8 to skip flags saved in SS */
	restore_mcount_regs 8

	/* Restore flags */
	popfq

	/*
	 * As this jmp to ftrace_return can be a short jump,
	 * it must not be copied into the trampoline.
	 * The trampoline will add the code to jump
	 * to the return.
	 */
GLOBAL(ftrace_regs_caller_end)

	jmp ftrace_return

	popfq
	jmp ftrace_stub

END(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

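/*
 * Without DYNAMIC_FTRACE every mcount/fentry call lands here, so
 * whether tracing is enabled is checked at run time by comparing the
 * callback pointers against their stub values.
 */
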
fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

trace:
	ftrace_caller_setup ftrace_caller_op_ptr

	call *ftrace_trace_function

	restore_mcount_regs

	jmp fgraph_trace
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
	save_mcount_regs

#ifdef CC_USING_FENTRY
	leaq SS+16(%rsp), %rdi
	movq $0, %rdx	/* No framepointers needed */
#else
	leaq 8(%rbp), %rdi
	movq (%rbp), %rdx
#endif
	movq RIP(%rsp), %rsi
	subq $MCOUNT_INSN_SIZE, %rsi

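/*
 * Arguments for prepare_ftrace_return(): %rdi = address of the slot
 * holding the traced function's return address, %rsi = the traced
 * function's ip, %rdx = its frame pointer. The call below swaps that
 * return address for return_to_handler.
 */
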
	call prepare_ftrace_return

	restore_mcount_regs

	retq
END(ftrace_graph_caller)

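/*
 * return_to_handler is the address the graph tracer plants as the
 * traced function's return address. It asks ftrace_return_to_handler()
 * for the original return address and jumps there, preserving the
 * function's return values in %rax/%rdx.
 */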
GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	jmp *%rdi
#endif