xref: /OK3568_Linux_fs/kernel/arch/m68k/include/asm/entry.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun #ifndef __M68K_ENTRY_H
3*4882a593Smuzhiyun #define __M68K_ENTRY_H
4*4882a593Smuzhiyun 
5*4882a593Smuzhiyun #include <asm/setup.h>
6*4882a593Smuzhiyun #include <asm/page.h>
7*4882a593Smuzhiyun #ifdef __ASSEMBLY__
8*4882a593Smuzhiyun #include <asm/thread_info.h>
9*4882a593Smuzhiyun #endif
10*4882a593Smuzhiyun 
11*4882a593Smuzhiyun /*
12*4882a593Smuzhiyun  * Stack layout in 'ret_from_exception':
13*4882a593Smuzhiyun  *
14*4882a593Smuzhiyun  *	This allows access to the syscall arguments in registers d1-d5
15*4882a593Smuzhiyun  *
16*4882a593Smuzhiyun  *	 0(sp) - d1
17*4882a593Smuzhiyun  *	 4(sp) - d2
18*4882a593Smuzhiyun  *	 8(sp) - d3
19*4882a593Smuzhiyun  *	 C(sp) - d4
20*4882a593Smuzhiyun  *	10(sp) - d5
21*4882a593Smuzhiyun  *	14(sp) - a0
22*4882a593Smuzhiyun  *	18(sp) - a1
23*4882a593Smuzhiyun  *	1C(sp) - a2
24*4882a593Smuzhiyun  *	20(sp) - d0
25*4882a593Smuzhiyun  *	24(sp) - orig_d0
26*4882a593Smuzhiyun  *	28(sp) - stack adjustment
27*4882a593Smuzhiyun  *	2C(sp) - [ sr              ] [ format & vector ]
28*4882a593Smuzhiyun  *	2E(sp) - [ pc-hiword       ] [ sr              ]
29*4882a593Smuzhiyun  *	30(sp) - [ pc-loword       ] [ pc-hiword       ]
30*4882a593Smuzhiyun  *	32(sp) - [ format & vector ] [ pc-loword       ]
31*4882a593Smuzhiyun  *		  ^^^^^^^^^^^^^^^^^   ^^^^^^^^^^^^^^^^^
32*4882a593Smuzhiyun  *			M68K		  COLDFIRE
33*4882a593Smuzhiyun  */
34*4882a593Smuzhiyun 
35*4882a593Smuzhiyun /* the following macro is used when enabling interrupts */
/* ALLOWINT is a mask ANDed into %sr: clearing IPL bits (mask 0x700) re-enables interrupts */
36*4882a593Smuzhiyun #if defined(MACH_ATARI_ONLY)
37*4882a593Smuzhiyun 	/* block out HSYNC = ipl 2 on the atari, so leave IPL at 2 (keep bit 0x200 set) */
38*4882a593Smuzhiyun #define ALLOWINT	(~0x500)
39*4882a593Smuzhiyun #else
40*4882a593Smuzhiyun 	/* portable version: clear the whole IPL field, allowing all interrupt levels */
41*4882a593Smuzhiyun #define ALLOWINT	(~0x700)
42*4882a593Smuzhiyun #endif /* machine compilation types */
43*4882a593Smuzhiyun 
44*4882a593Smuzhiyun #ifdef __ASSEMBLY__
45*4882a593Smuzhiyun /*
46*4882a593Smuzhiyun  * This defines the normal kernel pt-regs layout.
47*4882a593Smuzhiyun  *
48*4882a593Smuzhiyun  * regs a3-a6 and d6-d7 are preserved by C code
49*4882a593Smuzhiyun  * the kernel doesn't mess with usp unless it needs to
50*4882a593Smuzhiyun  */
/* 6 longs (a3-a6/d6-d7 pushed by SAVE_SWITCH_STACK) + 4 for the return address = 28 bytes */
51*4882a593Smuzhiyun #define SWITCH_STACK_SIZE	(6*4+4)	/* includes return address */
52*4882a593Smuzhiyun 
53*4882a593Smuzhiyun #ifdef CONFIG_COLDFIRE
54*4882a593Smuzhiyun #ifdef CONFIG_COLDFIRE_SW_A7
55*4882a593Smuzhiyun /*
56*4882a593Smuzhiyun  * This is made a little more tricky on older ColdFires. There is no
57*4882a593Smuzhiyun  * separate supervisor and user stack pointers. Need to artificially
58*4882a593Smuzhiyun  * construct a usp in software... When doing this we need to disable
59*4882a593Smuzhiyun  * interrupts, otherwise bad things will happen.
60*4882a593Smuzhiyun  */
/* software-maintained user and kernel stack pointers (defined elsewhere) */
61*4882a593Smuzhiyun .globl sw_usp
62*4882a593Smuzhiyun .globl sw_ksp
63*4882a593Smuzhiyun 
/*
 * SAVE_ALL_SYS -- build a pt_regs frame on the kernel stack on exception
 * entry.  On entry the CPU has already pushed an 8-byte hardware frame
 * (format/vector word, then sr, then pc) on whichever stack was active.
 * If we came from user mode, the hardware frame is copied over to the
 * (freshly switched-to) kernel stack.  %d0 is stored both as d0 and as
 * orig_d0 (the syscall number slot).  Interrupts stay disabled while
 * sw_usp/sw_ksp are being juggled.
 */
64*4882a593Smuzhiyun .macro SAVE_ALL_SYS
65*4882a593Smuzhiyun 	move	#0x2700,%sr		/* disable intrs */
66*4882a593Smuzhiyun 	btst	#5,%sp@(2)		/* from user? (S bit lives in byte 2 of the HW frame) */
67*4882a593Smuzhiyun 	bnes	6f			/* no, came from kernel: skip the stack switch */
68*4882a593Smuzhiyun 	movel	%sp,sw_usp		/* save user sp */
69*4882a593Smuzhiyun 	addql	#8,sw_usp		/* remove exception */
70*4882a593Smuzhiyun 	movel	sw_ksp,%sp		/* kernel sp */
71*4882a593Smuzhiyun 	subql	#8,%sp			/* room for exception */
72*4882a593Smuzhiyun 	clrl	%sp@-			/* stkadj */
73*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* orig d0 */
74*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* d0 */
75*4882a593Smuzhiyun 	lea	%sp@(-32),%sp		/* space for 8 regs */
76*4882a593Smuzhiyun 	moveml	%d1-%d5/%a0-%a2,%sp@
77*4882a593Smuzhiyun 	movel	sw_usp,%a0		/* get usp */
/* NOTE: PT_OFF_* offsets are generated elsewhere (asm-offsets) -- not visible here */
78*4882a593Smuzhiyun 	movel	%a0@-,%sp@(PT_OFF_PC)	/* copy exception program counter */
79*4882a593Smuzhiyun 	movel	%a0@-,%sp@(PT_OFF_FORMATVEC)/* copy exception format/vector/sr */
80*4882a593Smuzhiyun 	bra	7f
81*4882a593Smuzhiyun 	6:				/* kernel-mode entry: frame is already on this stack */
82*4882a593Smuzhiyun 	clrl	%sp@-			/* stkadj */
83*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* orig d0 */
84*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* d0 */
85*4882a593Smuzhiyun 	lea	%sp@(-32),%sp		/* space for 8 regs */
86*4882a593Smuzhiyun 	moveml	%d1-%d5/%a0-%a2,%sp@
87*4882a593Smuzhiyun 	7:
88*4882a593Smuzhiyun .endm
89*4882a593Smuzhiyun 
/*
 * SAVE_ALL_INT -- interrupt-entry variant of SAVE_ALL_SYS: same frame,
 * but orig_d0 is overwritten with -1 to flag "not a system call".
 */
90*4882a593Smuzhiyun .macro SAVE_ALL_INT
91*4882a593Smuzhiyun 	SAVE_ALL_SYS
92*4882a593Smuzhiyun 	moveq	#-1,%d0			/* not system call entry */
93*4882a593Smuzhiyun 	movel	%d0,%sp@(PT_OFF_ORIG_D0)
94*4882a593Smuzhiyun .endm
95*4882a593Smuzhiyun 
/*
 * RESTORE_USER -- return to user space on SW-a7 ColdFires.  Reverses
 * SAVE_ALL_SYS: rebuilds the 8-byte hardware exception frame on the user
 * stack, restores the saved registers, unwinds the pt_regs frame,
 * records the kernel sp in sw_ksp, switches %sp to the user stack and
 * executes rte from there.
 */
96*4882a593Smuzhiyun .macro RESTORE_USER
97*4882a593Smuzhiyun 	move	#0x2700,%sr		/* disable intrs */
98*4882a593Smuzhiyun 	movel	sw_usp,%a0		/* get usp */
99*4882a593Smuzhiyun 	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
100*4882a593Smuzhiyun 	movel	%sp@(PT_OFF_FORMATVEC),%a0@-/* copy exception format/vector/sr */
/* %a0 is clobbered above, but the next moveml reloads the saved user %a0 */
101*4882a593Smuzhiyun 	moveml	%sp@,%d1-%d5/%a0-%a2
102*4882a593Smuzhiyun 	lea	%sp@(32),%sp		/* pop the 8 saved regs */
103*4882a593Smuzhiyun 	movel	%sp@+,%d0
104*4882a593Smuzhiyun 	addql	#4,%sp			/* orig d0 */
105*4882a593Smuzhiyun 	addl	%sp@+,%sp		/* stkadj */
106*4882a593Smuzhiyun 	addql	#8,%sp			/* remove exception */
107*4882a593Smuzhiyun 	movel	%sp,sw_ksp		/* save ksp */
108*4882a593Smuzhiyun 	subql	#8,sw_usp		/* set exception: usp now points at the rebuilt HW frame */
109*4882a593Smuzhiyun 	movel	sw_usp,%sp		/* restore usp */
110*4882a593Smuzhiyun 	rte
111*4882a593Smuzhiyun .endm
112*4882a593Smuzhiyun 
/* RDUSP -- read the (software-maintained) user stack pointer into %a3 */
113*4882a593Smuzhiyun .macro RDUSP
114*4882a593Smuzhiyun 	movel	sw_usp,%a3
115*4882a593Smuzhiyun .endm
116*4882a593Smuzhiyun 
/* WRUSP -- write %a3 to the (software-maintained) user stack pointer */
117*4882a593Smuzhiyun .macro WRUSP
118*4882a593Smuzhiyun 	movel	%a3,sw_usp
119*4882a593Smuzhiyun .endm
120*4882a593Smuzhiyun 
121*4882a593Smuzhiyun #else /* !CONFIG_COLDFIRE_SW_A7 */
122*4882a593Smuzhiyun /*
123*4882a593Smuzhiyun  * Modern ColdFire parts have separate supervisor and user stack
124*4882a593Smuzhiyun  * pointers. Simple load and restore macros for this case.
125*4882a593Smuzhiyun  */
/*
 * SAVE_ALL_SYS -- build a pt_regs frame for ColdFires with a hardware
 * user stack pointer: no manual stack switch is needed, the CPU already
 * left us on the kernel stack with the HW exception frame on top.
 * %d0 is stored as both d0 and orig_d0 (syscall number slot).
 */
126*4882a593Smuzhiyun .macro SAVE_ALL_SYS
127*4882a593Smuzhiyun 	move	#0x2700,%sr		/* disable intrs */
128*4882a593Smuzhiyun 	clrl	%sp@-			/* stkadj */
129*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* orig d0 */
130*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* d0 */
131*4882a593Smuzhiyun 	lea	%sp@(-32),%sp		/* space for 8 regs */
132*4882a593Smuzhiyun 	moveml	%d1-%d5/%a0-%a2,%sp@
133*4882a593Smuzhiyun .endm
134*4882a593Smuzhiyun 
/*
 * SAVE_ALL_INT -- interrupt-entry variant: identical frame to
 * SAVE_ALL_SYS except orig_d0 is pushed as -1 ("not a system call").
 */
135*4882a593Smuzhiyun .macro SAVE_ALL_INT
136*4882a593Smuzhiyun 	move	#0x2700,%sr		/* disable intrs */
137*4882a593Smuzhiyun 	clrl	%sp@-			/* stkadj */
138*4882a593Smuzhiyun 	pea	-1:w			/* orig d0 = -1: not a syscall entry */
139*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* d0 */
140*4882a593Smuzhiyun 	lea	%sp@(-32),%sp		/* space for 8 regs */
141*4882a593Smuzhiyun 	moveml	%d1-%d5/%a0-%a2,%sp@
142*4882a593Smuzhiyun .endm
143*4882a593Smuzhiyun 
/*
 * RESTORE_USER -- unwind the pt_regs frame and return from exception.
 * With a hardware usp no stack switching is needed; rte consumes the
 * HW frame left on top after the stkadj adjustment.
 */
144*4882a593Smuzhiyun .macro RESTORE_USER
145*4882a593Smuzhiyun 	moveml	%sp@,%d1-%d5/%a0-%a2
146*4882a593Smuzhiyun 	lea	%sp@(32),%sp		/* pop the 8 saved regs */
147*4882a593Smuzhiyun 	movel	%sp@+,%d0
148*4882a593Smuzhiyun 	addql	#4,%sp			/* orig d0 */
149*4882a593Smuzhiyun 	addl	%sp@+,%sp		/* stkadj */
150*4882a593Smuzhiyun 	rte
151*4882a593Smuzhiyun .endm
152*4882a593Smuzhiyun 
/*
 * RDUSP -- read the hardware usp into %a3.  The opcode is hand-encoded
 * with .word; presumably because some assemblers reject this usp form
 * for ColdFire targets -- TODO confirm against current binutils.
 */
153*4882a593Smuzhiyun .macro RDUSP
154*4882a593Smuzhiyun 	/*move	%usp,%a3*/
155*4882a593Smuzhiyun 	.word	0x4e6b
156*4882a593Smuzhiyun .endm
157*4882a593Smuzhiyun 
/*
 * WRUSP -- write %a3 to the hardware usp.  Hand-encoded with .word for
 * the same assembler-support reason as RDUSP above -- TODO confirm.
 */
158*4882a593Smuzhiyun .macro WRUSP
159*4882a593Smuzhiyun 	/*move	%a3,%usp*/
160*4882a593Smuzhiyun 	.word	0x4e63
161*4882a593Smuzhiyun .endm
162*4882a593Smuzhiyun 
163*4882a593Smuzhiyun #endif /* !CONFIG_COLDFIRE_SW_A7 */
164*4882a593Smuzhiyun 
/*
 * SAVE_SWITCH_STACK -- push the six C-callee-saved regs (a3-a6/d6-d7).
 * ColdFire moveml cannot predecrement, so the space is reserved with
 * lea first and the regs stored with a plain %sp@ addressing mode.
 */
165*4882a593Smuzhiyun .macro SAVE_SWITCH_STACK
166*4882a593Smuzhiyun 	lea	%sp@(-24),%sp		/* 6 regs */
167*4882a593Smuzhiyun 	moveml	%a3-%a6/%d6-%d7,%sp@
168*4882a593Smuzhiyun .endm
169*4882a593Smuzhiyun 
/* RESTORE_SWITCH_STACK -- reload a3-a6/d6-d7 and drop their stack slots */
170*4882a593Smuzhiyun .macro RESTORE_SWITCH_STACK
171*4882a593Smuzhiyun 	moveml	%sp@,%a3-%a6/%d6-%d7
172*4882a593Smuzhiyun 	lea	%sp@(24),%sp		/* 6 regs */
173*4882a593Smuzhiyun .endm
174*4882a593Smuzhiyun 
175*4882a593Smuzhiyun #else /* !CONFIG_COLDFIRE */
176*4882a593Smuzhiyun 
177*4882a593Smuzhiyun /*
178*4882a593Smuzhiyun  * All other types of m68k parts (68000, 680x0, CPU32) have the same
179*4882a593Smuzhiyun  * entry and exit code.
180*4882a593Smuzhiyun  */
181*4882a593Smuzhiyun 
182*4882a593Smuzhiyun /*
183*4882a593Smuzhiyun  * a -1 in the orig_d0 field signifies
184*4882a593Smuzhiyun  * that the stack frame is NOT for syscall
185*4882a593Smuzhiyun  */
/*
 * SAVE_ALL_INT -- interrupt-entry frame for classic m68k: build pt_regs
 * below the CPU's hardware frame, with orig_d0 = -1 to mark "not a
 * system call".  Classic m68k moveml supports predecrement directly.
 */
186*4882a593Smuzhiyun .macro SAVE_ALL_INT
187*4882a593Smuzhiyun 	clrl	%sp@-			/* stk_adj */
188*4882a593Smuzhiyun 	pea	-1:w			/* orig d0 = -1: not a syscall entry */
189*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* d0 */
190*4882a593Smuzhiyun 	moveml	%d1-%d5/%a0-%a2,%sp@-
191*4882a593Smuzhiyun .endm
192*4882a593Smuzhiyun 
/*
 * SAVE_ALL_SYS -- syscall-entry frame for classic m68k: same layout as
 * SAVE_ALL_INT, but orig_d0 keeps the real %d0 (the syscall number).
 */
193*4882a593Smuzhiyun .macro SAVE_ALL_SYS
194*4882a593Smuzhiyun 	clrl	%sp@-			/* stk_adj */
195*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* orig d0 */
196*4882a593Smuzhiyun 	movel	%d0,%sp@-		/* d0 */
197*4882a593Smuzhiyun 	moveml	%d1-%d5/%a0-%a2,%sp@-
198*4882a593Smuzhiyun .endm
199*4882a593Smuzhiyun 
/*
 * RESTORE_ALL -- unwind the pt_regs frame built by SAVE_ALL_SYS/INT and
 * return from exception.  The stkadj longword (possibly non-zero after
 * signal delivery) is added into %sp before rte consumes the HW frame.
 */
200*4882a593Smuzhiyun .macro RESTORE_ALL
201*4882a593Smuzhiyun 	moveml	%sp@+,%a0-%a2/%d1-%d5
202*4882a593Smuzhiyun 	movel	%sp@+,%d0
203*4882a593Smuzhiyun 	addql	#4,%sp			/* orig d0 */
204*4882a593Smuzhiyun 	addl	%sp@+,%sp		/* stk adj */
205*4882a593Smuzhiyun 	rte
206*4882a593Smuzhiyun .endm
207*4882a593Smuzhiyun 
208*4882a593Smuzhiyun 
/* SAVE_SWITCH_STACK -- push the six C-callee-saved regs (a3-a6/d6-d7) */
209*4882a593Smuzhiyun .macro SAVE_SWITCH_STACK
210*4882a593Smuzhiyun 	moveml	%a3-%a6/%d6-%d7,%sp@-
211*4882a593Smuzhiyun .endm
212*4882a593Smuzhiyun 
/* RESTORE_SWITCH_STACK -- pop a3-a6/d6-d7 pushed by SAVE_SWITCH_STACK */
213*4882a593Smuzhiyun .macro RESTORE_SWITCH_STACK
214*4882a593Smuzhiyun 	moveml	%sp@+,%a3-%a6/%d6-%d7
215*4882a593Smuzhiyun .endm
216*4882a593Smuzhiyun 
217*4882a593Smuzhiyun #endif /* !CONFIG_COLDFIRE */
218*4882a593Smuzhiyun 
219*4882a593Smuzhiyun /*
220*4882a593Smuzhiyun  * Register %a2 is reserved and set to current task on MMU enabled systems.
221*4882a593Smuzhiyun  * Non-MMU systems do not reserve %a2 in this way, and this definition is
222*4882a593Smuzhiyun  * not used for them.
223*4882a593Smuzhiyun  */
224*4882a593Smuzhiyun #ifdef CONFIG_MMU
225*4882a593Smuzhiyun 
/* %a2 is the reserved "current task" register on MMU systems (see comment above) */
226*4882a593Smuzhiyun #define curptr a2
227*4882a593Smuzhiyun 
228*4882a593Smuzhiyun #define GET_CURRENT(tmp) get_current tmp
/*
 * get_current -- load the current task pointer into %curptr (%a2),
 * clobbering \reg as scratch.  The kernel stack pointer is rounded down
 * to the THREAD_SIZE-aligned base of the stack, then dereferenced once;
 * assumes the task pointer sits at offset 0 of that base (thread_info
 * layout is defined elsewhere) -- confirm against asm/thread_info.h.
 */
229*4882a593Smuzhiyun .macro get_current reg=%d0
230*4882a593Smuzhiyun 	movel	%sp,\reg
231*4882a593Smuzhiyun 	andl	#-THREAD_SIZE,\reg	/* round sp down to stack base */
232*4882a593Smuzhiyun 	movel	\reg,%curptr
233*4882a593Smuzhiyun 	movel	%curptr@,%curptr	/* curptr = *(stack base) */
234*4882a593Smuzhiyun .endm
235*4882a593Smuzhiyun 
236*4882a593Smuzhiyun #else
237*4882a593Smuzhiyun 
/* No reserved "current" register without an MMU: GET_CURRENT expands to nothing */
238*4882a593Smuzhiyun #define GET_CURRENT(tmp)
239*4882a593Smuzhiyun 
240*4882a593Smuzhiyun #endif /* CONFIG_MMU */
241*4882a593Smuzhiyun 
242*4882a593Smuzhiyun #else /* C source */
243*4882a593Smuzhiyun 
/* Two-level stringify so that macro arguments (e.g. THREAD_SIZE) are expanded first */
244*4882a593Smuzhiyun #define STR(X) STR1(X)
245*4882a593Smuzhiyun #define STR1(X) #X
246*4882a593Smuzhiyun 
/*
 * C-source (inline asm string) version of the classic-m68k SAVE_ALL_INT
 * above: builds the same pt_regs frame with orig_d0 = -1.
 */
247*4882a593Smuzhiyun #define SAVE_ALL_INT				\
248*4882a593Smuzhiyun 	"clrl	%%sp@-;"    /* stk_adj */	\
249*4882a593Smuzhiyun 	"pea	-1:w;"	    /* orig d0 = -1 */	\
250*4882a593Smuzhiyun 	"movel	%%d0,%%sp@-;" /* d0 */		\
251*4882a593Smuzhiyun 	"moveml	%%d1-%%d5/%%a0-%%a2,%%sp@-"
252*4882a593Smuzhiyun 
/*
 * C-source (inline asm string) version of get_current: round %sp down to
 * the stack base and load the task pointer from offset 0 into %a2.
 * Uses andw rather than the asm version's andl: the THREAD_SIZE
 * alignment bits all lie in the low word, so a word-sized and suffices.
 */
253*4882a593Smuzhiyun #define GET_CURRENT(tmp) \
254*4882a593Smuzhiyun 	"movel	%%sp,"#tmp"\n\t" \
255*4882a593Smuzhiyun 	"andw	#-"STR(THREAD_SIZE)","#tmp"\n\t" \
256*4882a593Smuzhiyun 	"movel	"#tmp",%%a2\n\t" \
257*4882a593Smuzhiyun 	"movel	%%a2@,%%a2"
258*4882a593Smuzhiyun 
259*4882a593Smuzhiyun #endif
260*4882a593Smuzhiyun 
261*4882a593Smuzhiyun #endif /* __M68K_ENTRY_H */
262