4 * Copyright (C) 2008 Paul Mundt
5 * Copyright (C) 2008, 2009 Matt Fleming
7 * This file is subject to the terms and conditions of the GNU General Public
8 * License. See the file "COPYING" in the main directory of this archive
11 #include <asm/ftrace.h>
12 #include <asm/thread_info.h>
13 #include <asm/asm-offsets.h>
15 #define MCOUNT_ENTER() \
25 #define MCOUNT_LEAVE() \
33 #ifdef CONFIG_STACK_DEBUG
35 * Perform diagnostic checks on the state of the kernel stack.
37 * Check for stack overflow. If there is less than 1KB free
38 * then it has overflowed.
40 * Make sure the stack pointer contains a valid address. Valid
41 * addresses for kernel stacks are anywhere after the bss
42 * (after _ebss) and anywhere in init_thread_union (init_stack).
44 #define STACK_CHECK() \
45 mov #(THREAD_SIZE >> 10), r0; \
49 /* r1 = sp & (THREAD_SIZE - 1) */ \
55 mov #(STACK_WARN >> 8), r2; \
59 /* Is the stack overflowing? */ \
63 /* If sp > _ebss then we're OK. */ \
68 /* If sp < init_stack, we're not OK. */ \
69 mov.l .L_init_thread_union, r1; \
73 /* If sp > init_stack && sp < _ebss, not OK. */ \
80 #endif /* CONFIG_STACK_DEBUG */
84 .type _mcount,@function
86 .type mcount,@function
89 #ifndef CONFIG_DYNAMIC_FTRACE
90 mov.l .Lfunction_trace_stop, r0
99 #ifdef CONFIG_DYNAMIC_FTRACE
102 mov.l .Lftrace_stub, r6
104 mov.l .Lftrace_trace_function, r6
105 mov.l ftrace_stub, r7
114 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
115 mov.l .Lftrace_graph_return, r6
116 mov.l .Lftrace_stub, r7
120 mov.l .Lftrace_graph_caller, r0
125 mov.l .Lftrace_graph_entry, r6
126 mov.l .Lftrace_graph_entry_stub, r7
130 mov.l .Lftrace_graph_caller, r0
/*
 * Literal pool for the function-graph tracer paths above.
 *
 * SH has no 32-bit immediates, so symbol addresses are kept here as
 * .long constants and fetched PC-relatively by the preceding code
 * (e.g. "mov.l .Lftrace_graph_return, r6" / "mov.l .Lftrace_graph_caller, r0").
 * Do not reorder or insert data here without checking the displacement
 * warning further down in this file.
 */
.Lftrace_graph_return:
	.long ftrace_graph_return	/* current graph return hook (compared against ftrace_stub) */
.Lftrace_graph_entry:
	.long ftrace_graph_entry	/* current graph entry hook */
.Lftrace_graph_entry_stub:
	.long ftrace_graph_entry_stub	/* default no-op entry hook, used for the comparison */
.Lftrace_graph_caller:
	.long ftrace_graph_caller	/* trampoline jumped to when a graph hook is installed */
143 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
/*
 * Address of the currently installed tracer callback; loaded
 * PC-relatively above ("mov.l .Lftrace_trace_function, r6") so the
 * caller can compare it against ftrace_stub before invoking it.
 */
.Lftrace_trace_function:
	.long ftrace_trace_function
153 #ifdef CONFIG_DYNAMIC_FTRACE
154 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
156 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
157 * as this will affect the calculation of GRAPH_INSN_OFFSET.
159 .globl ftrace_graph_call
161 mov.l .Lskip_trace, r0
168 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
172 mov.l .Lfunction_trace_stop, r0
183 mov.l .Lftrace_stub, r6
187 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
188 bra ftrace_graph_call
192 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
193 #endif /* CONFIG_DYNAMIC_FTRACE */
/*
 * Address of the function_trace_stop flag, loaded at the top of the
 * mcount/ftrace_caller entry paths ("mov.l .Lfunction_trace_stop, r0").
 * Presumably dereferenced to bail out early when tracing has been
 * stopped — confirm against the full (unelided) entry sequence.
 */
.Lfunction_trace_stop:
	.long function_trace_stop
200 * NOTE: From here on the locations of the .Lftrace_stub label and
201 * ftrace_stub itself are fixed. Adding additional data here will skew
202 * the displacement for the memory table and break the block replacement.
203 * Place new labels either after the ftrace_stub body, or before
204 * ftrace_caller. You have been warned.
214 #ifdef CONFIG_STACK_DEBUG
217 mov.l .Ldump_stack, r0
/*
 * Address of init_thread_union (init_stack), used by STACK_CHECK()
 * ("mov.l .L_init_thread_union, r1") as the lower bound when
 * validating that the current stack pointer is a plausible kernel
 * stack address. CONFIG_STACK_DEBUG only.
 */
.L_init_thread_union:
	.long init_thread_union
243 .string "Stack error"
244 #endif /* CONFIG_STACK_DEBUG */
246 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
247 .globl ftrace_graph_caller
259 * MCOUNT_ENTER() pushed 5 registers onto the stack, so
260 * the stack address containing our return address is
267 mov.l .Lprepare_ftrace_return, r0
	/*
	 * Local literal pool for ftrace_graph_caller.
	 * The "2:" numeric label presumably serves a 2f/2b PC-relative
	 * load in the elided code above — TODO confirm against the full
	 * function body.
	 */
2:	.long function_trace_stop
.Lprepare_ftrace_return:
	.long prepare_ftrace_return	/* hook that records the return address, loaded via "mov.l .Lprepare_ftrace_return, r0" */
279 .globl return_to_handler
282 * Save the return values.
289 mov.l .Lftrace_return_to_handler, r0
294 * The return value from ftrace_return_handler has the real
295 * address that we should return to.
/*
 * Address of ftrace_return_to_handler, loaded PC-relatively by
 * return_to_handler above ("mov.l .Lftrace_return_to_handler, r0");
 * its result supplies the real address to return to.
 */
.Lftrace_return_to_handler:
	.long ftrace_return_to_handler
306 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */