/* SPDX-License-Identifier: GPL-2.0
 *
 * arch/sh/lib/mcount.S
 *
 *  Copyright (C) 2008, 2009  Paul Mundt
 *  Copyright (C) 2008, 2009  Matt Fleming
 */
#include <asm/ftrace.h>
#include <asm/thread_info.h>
#include <asm/asm-offsets.h>

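/*
 * Common tracer prologue/epilogue.
 *
 * The instrumented function's -pg prologue pushes its pr before
 * branching here, so on entry @r15 holds the caller's return address
 * and pr points back into the instrumented function (see the r15 + 20
 * note at ftrace_graph_caller below). After MCOUNT_ENTER()'s five
 * pushes the frame is:
 *
 *	r15 + 20:	caller's return address	(loaded into r4)
 *	r15 + 16:	r4
 *	r15 + 12:	r5
 *	r15 +  8:	r6
 *	r15 +  4:	r7
 *	r15 +  0:	pr			(copied into r5)
 *
 * MCOUNT_LEAVE() pops everything back in reverse; the final pop of
 * r4 sits in the rts delay slot and executes before the return.
 */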
#define MCOUNT_ENTER()		\
	mov.l	r4, @-r15;	\
	mov.l	r5, @-r15;	\
	mov.l	r6, @-r15;	\
	mov.l	r7, @-r15;	\
	sts.l	pr, @-r15;	\
				\
	mov.l	@(20,r15),r4;	\
	sts	pr, r5

#define MCOUNT_LEAVE()		\
	lds.l	@r15+, pr;	\
	mov.l	@r15+, r7;	\
	mov.l	@r15+, r6;	\
	mov.l	@r15+, r5;	\
	rts;			\
	 mov.l	@r15+, r4

#ifdef CONFIG_STACK_DEBUG
/*
 * Perform diagnostic checks on the state of the kernel stack.
 *
 * Check for stack overflow. If there is less than 1KB free
 * then it has overflowed.
 *
 * Make sure the stack pointer contains a valid address. Valid
 * addresses for kernel stacks are anywhere after the bss
 * (after __bss_stop) and anywhere in init_thread_union (init_stack).
 */
#define STACK_CHECK()					\
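	/* r0 = THREAD_SIZE: 8-bit immediate shifted left by 10 */ \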
	mov	#(THREAD_SIZE >> 10), r0;		\
	shll8	r0;					\
	shll2	r0;					\
							\
	/* r1 = sp & (THREAD_SIZE - 1) */		\
	mov	#-1, r1;				\
	add	r0, r1;					\
	and	r15, r1;				\
							\
	mov	#TI_SIZE, r3;				\
	mov	#(STACK_WARN >> 8), r2;			\
	shll8	r2;					\
	add	r3, r2;					\
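	/* r2 = TI_SIZE + STACK_WARN, the minimum safe sp offset */ \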
							\
	/* Is the stack overflowing? */			\
	cmp/hi	r2, r1;					\
	bf	stack_panic;				\
							\
	/* If sp > __bss_stop then we're OK. */		\
	mov.l	.L_ebss, r1;				\
	cmp/hi	r1, r15;				\
	bt	1f;					\
							\
	/* If sp < init_stack, we're not OK. */		\
	mov.l	.L_init_thread_union, r1;		\
	cmp/hs	r1, r15;				\
	bf	stack_panic;				\
							\
	/* sp above init_stack + THREAD_SIZE but below __bss_stop: not OK. */	\
	add	r0, r1;					\
	cmp/hs	r1, r15;				\
	bt	stack_panic;				\
1:
#else
#define STACK_CHECK()
#endif /* CONFIG_STACK_DEBUG */

	.align 2
	.globl	_mcount
	.type	_mcount,@function
	.globl	mcount
	.type	mcount,@function
_mcount:
mcount:
	STACK_CHECK()

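	/* No tracer configured: mcount only serves STACK_CHECK(). */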
#ifndef CONFIG_FUNCTION_TRACER
	rts
	 nop
#else
	MCOUNT_ENTER()

#ifdef CONFIG_DYNAMIC_FTRACE
	.globl	mcount_call
mcount_call:
	mov.l	.Lftrace_stub, r6
#else
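	/*
	 * Fast path: skip the indirect call while no tracer is
	 * registered, i.e. while ftrace_trace_function still holds
	 * the default ftrace_stub.
	 */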
	mov.l	.Lftrace_trace_function, r6
	mov.l	ftrace_stub, r7
	cmp/eq	r6, r7
	bt	skip_trace
	mov.l	@r6, r6
#endif

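	/* Call the tracer; MCOUNT_ENTER() staged its arguments in r4/r5. */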
	jsr	@r6
	 nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
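	/*
	 * Hand over to ftrace_graph_caller if either graph hook has
	 * been changed from its default stub.
	 */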
	mov.l	.Lftrace_graph_return, r6
	mov.l	.Lftrace_stub, r7
	cmp/eq	r6, r7
	bt	1f

	mov.l	.Lftrace_graph_caller, r0
	jmp	@r0
	 nop

1:
	mov.l	.Lftrace_graph_entry, r6
	mov.l	.Lftrace_graph_entry_stub, r7
	cmp/eq	r6, r7
	bt	skip_trace

	mov.l	.Lftrace_graph_caller, r0
	jmp	@r0
	 nop

	.align 2
.Lftrace_graph_return:
	.long	ftrace_graph_return
.Lftrace_graph_entry:
	.long	ftrace_graph_entry
.Lftrace_graph_entry_stub:
	.long	ftrace_graph_entry_stub
.Lftrace_graph_caller:
	.long	ftrace_graph_caller
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

	.globl skip_trace
skip_trace:
	MCOUNT_LEAVE()

	.align 2
.Lftrace_trace_function:
	.long	ftrace_trace_function

#ifdef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
 * as this will affect the calculation of GRAPH_INSN_OFFSET.
 */
	.globl ftrace_graph_call
ftrace_graph_call:
	mov.l	.Lskip_trace, r0
	jmp	@r0
	 nop

	.align 2
.Lskip_trace:
	.long	skip_trace
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

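/*
 * With dynamic ftrace, mcount call sites are patched to branch here
 * instead; retargeting the literal consumed at ftrace_call below
 * switches the active tracer (hence the fixed displacements noted
 * further down).
 */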
	.globl ftrace_caller
ftrace_caller:
	MCOUNT_ENTER()

	.globl ftrace_call
ftrace_call:
	mov.l	.Lftrace_stub, r6
	jsr	@r6
	 nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	bra	ftrace_graph_call
	 nop
#else
	MCOUNT_LEAVE()
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_DYNAMIC_FTRACE */

	.align 2

/*
 * NOTE: From here on the locations of the .Lftrace_stub label and
 * ftrace_stub itself are fixed. Adding additional data here will skew
 * the displacement for the memory table and break the block replacement.
 * Place new labels either after the ftrace_stub body, or before
 * ftrace_caller. You have been warned.
 */
.Lftrace_stub:
	.long	ftrace_stub

	.globl	ftrace_stub
ftrace_stub:
	rts
	 nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl	ftrace_graph_caller
ftrace_graph_caller:
	mov.l	2f, r1
	jmp	@r1
	 nop
1:
	/*
	 * MCOUNT_ENTER() pushed 5 registers onto the stack, so
	 * the stack address containing our return address is
	 * r15 + 20.
	 */
	mov	#20, r0
	add	r15, r0
	mov	r0, r4
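	/*
	 * r4 = address of that return-address slot, so that
	 * prepare_ftrace_return() can redirect it to
	 * return_to_handler.
	 */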

	mov.l	.Lprepare_ftrace_return, r0
	jsr	@r0
	 nop

	MCOUNT_LEAVE()

	.align 2
2:	.long	skip_trace
.Lprepare_ftrace_return:
	.long	prepare_ftrace_return

	.globl	return_to_handler
return_to_handler:
	/*
	 * Save the return values.
	 */
	mov.l	r0, @-r15
	mov.l	r1, @-r15

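	/*
	 * ftrace_return_to_handler() takes a frame pointer argument
	 * for cross-checking; SH does not implement that test, so
	 * pass zero.
	 */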
	mov	#0, r4

	mov.l	.Lftrace_return_to_handler, r0
	jsr	@r0
	 nop

	/*
	 * The return value from ftrace_return_to_handler has the
	 * real address that we should return to.
	 */
	lds	r0, pr
	mov.l	@r15+, r1
	rts
	 mov.l	@r15+, r0


	.align 2
.Lftrace_return_to_handler:
	.long	ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_STACK_DEBUG
	.globl	stack_panic
stack_panic:
	mov.l	.Ldump_stack, r0
	jsr	@r0
	 nop

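	/* The delay slot loads panic()'s message argument before the jump. */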
	mov.l	.Lpanic, r0
	jsr	@r0
	 mov.l	.Lpanic_s, r4

	rts
	 nop

	.align 2
.L_init_thread_union:
	.long	init_thread_union
.L_ebss:
	.long	__bss_stop
.Lpanic:
	.long	panic
.Lpanic_s:
	.long	.Lpanic_str
.Ldump_stack:
	.long	dump_stack

	.section	.rodata
	.align 2
.Lpanic_str:
	.string "Stack error"
#endif /* CONFIG_STACK_DEBUG */