/*
 * Imported from linux-2.6/kvm.git, arch/sh/lib/mcount.S
 * blob 9e397aafc16536075b2c4ca0cb7f3ee229f07663
 * ("sh: Fix up ftrace build error when STACK_DEBUG=n.")
 */
/*
 * arch/sh/lib/mcount.S
 *
 *  Copyright (C) 2008  Paul Mundt
 *  Copyright (C) 2008, 2009  Matt Fleming
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#include <asm/ftrace.h>
#include <asm/thread_info.h>
#include <asm/asm-offsets.h>
/*
 * MCOUNT_ENTER() - save the state the tracer call would clobber
 * (argument registers r4-r7 and the procedure return register pr,
 * 5 words total), then load the tracer's arguments:
 *
 *   r4 = word at r15+20 after the pushes, i.e. the value the
 *        instrumented function spilled immediately before calling
 *        mcount (presumably its caller's return address - verify
 *        against the compiler's -pg call sequence)
 *   r5 = pr, the mcount call site inside the instrumented function
 */
#define MCOUNT_ENTER()          \
        mov.l   r4, @-r15;      \
        mov.l   r5, @-r15;      \
        mov.l   r6, @-r15;      \
        mov.l   r7, @-r15;      \
        sts.l   pr, @-r15;      \
                                \
        mov.l   @(20,r15),r4;   \
        sts     pr, r5
/*
 * MCOUNT_LEAVE() - undo MCOUNT_ENTER(): pop pr and r4-r7 in reverse
 * push order and return to the instrumented function.  The final pop
 * of r4 sits in the rts delay slot and executes before the branch
 * takes effect.
 */
#define MCOUNT_LEAVE()          \
        lds.l   @r15+, pr;      \
        mov.l   @r15+, r7;      \
        mov.l   @r15+, r6;      \
        mov.l   @r15+, r5;      \
        rts;                    \
         mov.l  @r15+, r4
#ifdef CONFIG_STACK_DEBUG
/*
 * Perform diagnostic checks on the state of the kernel stack.
 *
 * Check for stack overflow. If there is less than 1KB free
 * then it has overflowed.
 *
 * Make sure the stack pointer contains a valid address. Valid
 * addresses for kernel stacks are anywhere after the bss
 * (after _ebss) and anywhere in init_thread_union (init_stack).
 *
 * Clobbers r0-r3.  On failure it branches to stack_panic, which
 * does not return; on success it falls through at the local 1: label.
 */
#define STACK_CHECK()                                   \
        mov     #(THREAD_SIZE >> 10), r0;               \
        shll8   r0;                                     \
        shll2   r0;                                     \
                                                        \
        /* r1 = sp & (THREAD_SIZE - 1) */               \
        mov     #-1, r1;                                \
        add     r0, r1;                                 \
        and     r15, r1;                                \
                                                        \
        mov     #TI_SIZE, r3;                           \
        mov     #(STACK_WARN >> 8), r2;                 \
        shll8   r2;                                     \
        add     r3, r2;                                 \
                                                        \
        /* Is the stack overflowing? */                 \
        cmp/hi  r2, r1;                                 \
        bf      stack_panic;                            \
                                                        \
        /* If sp > _ebss then we're OK. */              \
        mov.l   .L_ebss, r1;                            \
        cmp/hi  r1, r15;                                \
        bt      1f;                                     \
                                                        \
        /* If sp < init_stack, we're not OK. */         \
        mov.l   .L_init_thread_union, r1;               \
        cmp/hs  r1, r15;                                \
        bf      stack_panic;                            \
                                                        \
        /* If sp > init_stack && sp < _ebss, not OK. */ \
        add     r0, r1;                                 \
        cmp/hs  r1, r15;                                \
        bt      stack_panic;                            \
1:
#else
#define STACK_CHECK()
#endif /* CONFIG_STACK_DEBUG */
/*
 * mcount/_mcount - profiling entry point, invoked at the start of
 * every instrumented function when function tracing is enabled.
 * Dispatches to the current ftrace tracer and, when the function
 * graph tracer is active, to ftrace_graph_caller.
 */
        .align 2
        .globl  _mcount
        .type   _mcount,@function
        .globl  mcount
        .type   mcount,@function
_mcount:
mcount:
#ifndef CONFIG_DYNAMIC_FTRACE
        /* Bail out straight away if function_trace_stop is non-zero. */
        mov.l   .Lfunction_trace_stop, r0
        mov.l   @r0, r0
        tst     r0, r0
        bf      ftrace_stub
#endif
        STACK_CHECK()

        MCOUNT_ENTER()

#ifdef CONFIG_DYNAMIC_FTRACE
        .globl  mcount_call
mcount_call:
        /* This load is patched at runtime by the dynamic ftrace code. */
        mov.l   .Lftrace_stub, r6
#else
        /* r6 = &ftrace_trace_function (literal below holds its address) */
        mov.l   .Lftrace_trace_function, r6
        /*
         * NOTE(review): this loads the word *at* ftrace_stub rather
         * than the .Lftrace_stub literal, which makes the skip_trace
         * shortcut look ineffective; preserved byte-for-byte from the
         * original - confirm against upstream before changing.
         */
        mov.l   ftrace_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace
        mov.l   @r6, r6
#endif

        /* Call the tracer; MCOUNT_ENTER() set up r4/r5 for it. */
        jsr     @r6
         nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        /* Take the graph path if a return handler is installed... */
        mov.l   .Lftrace_graph_return, r6
        mov.l   .Lftrace_stub, r7
        cmp/eq  r6, r7
        bt      1f

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

1:
        /* ...or if an entry handler is installed. */
        mov.l   .Lftrace_graph_entry, r6
        mov.l   .Lftrace_graph_entry_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

        .align 2
.Lftrace_graph_return:
        .long   ftrace_graph_return
.Lftrace_graph_entry:
        .long   ftrace_graph_entry
.Lftrace_graph_entry_stub:
        .long   ftrace_graph_entry_stub
.Lftrace_graph_caller:
        .long   ftrace_graph_caller
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

        .globl skip_trace
skip_trace:
        MCOUNT_LEAVE()

        .align 2
.Lftrace_trace_function:
        .long   ftrace_trace_function
#ifdef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
 * as this will affect the calculation of GRAPH_INSN_OFFSET.
 */
        .globl ftrace_graph_call
ftrace_graph_call:
        /* Patched by the graph tracer; defaults to a jump to skip_trace. */
        mov.l   .Lskip_trace, r0
        jmp     @r0
         nop

        .align 2
.Lskip_trace:
        .long   skip_trace
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

/*
 * ftrace_caller - dynamic ftrace entry point; call sites are patched
 * to come here instead of mcount.  The load at ftrace_call is
 * rewritten at runtime to point at the active tracer.
 */
        .globl ftrace_caller
ftrace_caller:
        /* Do nothing while function_trace_stop is set. */
        mov.l   .Lfunction_trace_stop, r0
        mov.l   @r0, r0
        tst     r0, r0
        bf      ftrace_stub

        STACK_CHECK()

        MCOUNT_ENTER()

        .globl ftrace_call
ftrace_call:
        mov.l   .Lftrace_stub, r6
        jsr     @r6
         nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        bra     ftrace_graph_call
         nop
#else
        MCOUNT_LEAVE()
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_DYNAMIC_FTRACE */
195         .align 2
196 .Lfunction_trace_stop:
197         .long   function_trace_stop
200  * NOTE: From here on the locations of the .Lftrace_stub label and
201  * ftrace_stub itself are fixed. Adding additional data here will skew
202  * the displacement for the memory table and break the block replacement.
203  * Place new labels either after the ftrace_stub body, or before
204  * ftrace_caller. You have been warned.
205  */
206 .Lftrace_stub:
207         .long   ftrace_stub
209         .globl  ftrace_stub
210 ftrace_stub:
211         rts
212          nop
#ifdef CONFIG_STACK_DEBUG
/*
 * stack_panic - failure path for STACK_CHECK(): dump the stack and
 * panic("Stack error").  panic() does not return; the trailing rts
 * is unreachable padding.
 */
        .globl  stack_panic
stack_panic:
        mov.l   .Ldump_stack, r0
        jsr     @r0
         nop

        mov.l   .Lpanic, r0
        jsr     @r0
         mov.l  .Lpanic_s, r4       /* delay slot: r4 = panic format string */

        rts
         nop

        .align 2
.L_ebss:
        .long   _ebss
.L_init_thread_union:
        .long   init_thread_union
.Lpanic:
        .long   panic
.Lpanic_s:
        .long   .Lpanic_str
.Ldump_stack:
        .long   dump_stack

        .section        .rodata
        .align 2
.Lpanic_str:
        .string "Stack error"
#endif /* CONFIG_STACK_DEBUG */
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * ftrace_graph_caller - reached from mcount/ftrace_graph_call when a
 * graph entry or return handler is installed.  Passes the address of
 * the saved return-address slot to prepare_ftrace_return() so it can
 * be redirected to return_to_handler.
 */
        .globl  ftrace_graph_caller
ftrace_graph_caller:
        /* Skip graph tracing while function_trace_stop is set. */
        mov.l   2f, r0
        mov.l   @r0, r0
        tst     r0, r0
        bt      1f

        mov.l   3f, r1
        jmp     @r1
         nop
1:
        /*
         * MCOUNT_ENTER() pushed 5 registers onto the stack, so
         * the stack address containing our return address is
         * r15 + 20.
         */
        mov     #20, r0
        add     r15, r0
        mov     r0, r4              /* r4 = &saved return address */

        mov.l   .Lprepare_ftrace_return, r0
        jsr     @r0
         nop

        MCOUNT_LEAVE()

        .align 2
2:      .long   function_trace_stop
3:      .long   skip_trace
.Lprepare_ftrace_return:
        .long   prepare_ftrace_return
/*
 * return_to_handler - the graph tracer rewrites instrumented
 * functions' return addresses to land here.  Preserve the returning
 * function's result registers (r0/r1), retrieve the original return
 * address from ftrace_return_to_handler(), and jump back to it.
 */
        .globl  return_to_handler
return_to_handler:
        /*
         * Save the return values.
         */
        mov.l   r0, @-r15
        mov.l   r1, @-r15

        mov     #0, r4

        mov.l   .Lftrace_return_to_handler, r0
        jsr     @r0
         nop

        /*
         * The return value from ftrace_return_handler has the real
         * address that we should return to.
         */
        lds     r0, pr
        mov.l   @r15+, r1
        rts
         mov.l  @r15+, r0          /* delay slot: restore r0 last */

        .align 2
.Lftrace_return_to_handler:
        .long   ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */