xref: /OK3568_Linux_fs/kernel/arch/m68k/fpsp040/gen_except.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun|
2*4882a593Smuzhiyun|	gen_except.sa 3.7 1/16/92
3*4882a593Smuzhiyun|
4*4882a593Smuzhiyun|	gen_except --- FPSP routine to detect reportable exceptions
5*4882a593Smuzhiyun|
6*4882a593Smuzhiyun|	This routine compares the exception enable byte of the
7*4882a593Smuzhiyun|	user_fpcr on the stack with the exception status byte
8*4882a593Smuzhiyun|	of the user_fpsr.
9*4882a593Smuzhiyun|
10*4882a593Smuzhiyun|	Any routine which may report an exception must load
11*4882a593Smuzhiyun|	the stack frame in memory with the exceptional operand(s).
12*4882a593Smuzhiyun|
13*4882a593Smuzhiyun|	Priority for exceptions is:
14*4882a593Smuzhiyun|
15*4882a593Smuzhiyun|	Highest:	bsun
16*4882a593Smuzhiyun|			snan
17*4882a593Smuzhiyun|			operr
18*4882a593Smuzhiyun|			ovfl
19*4882a593Smuzhiyun|			unfl
20*4882a593Smuzhiyun|			dz
21*4882a593Smuzhiyun|			inex2
22*4882a593Smuzhiyun|	Lowest:		inex1
23*4882a593Smuzhiyun|
24*4882a593Smuzhiyun|	Note: The IEEE standard specifies that inex2 is to be
25*4882a593Smuzhiyun|	reported if ovfl occurs and the ovfl enable bit is not
26*4882a593Smuzhiyun|	set but the inex2 enable bit is.
27*4882a593Smuzhiyun|
28*4882a593Smuzhiyun|
29*4882a593Smuzhiyun|		Copyright (C) Motorola, Inc. 1990
30*4882a593Smuzhiyun|			All Rights Reserved
31*4882a593Smuzhiyun|
32*4882a593Smuzhiyun|       For details on the license for this file, please see the
33*4882a593Smuzhiyun|       file, README, in this same directory.
34*4882a593Smuzhiyun
35*4882a593SmuzhiyunGEN_EXCEPT:    |idnt    2,1 | Motorola 040 Floating Point Software Package
36*4882a593Smuzhiyun
37*4882a593Smuzhiyun	|section 8
38*4882a593Smuzhiyun
39*4882a593Smuzhiyun#include "fpsp.h"
40*4882a593Smuzhiyun
41*4882a593Smuzhiyun	|xref	real_trace
42*4882a593Smuzhiyun	|xref	fpsp_done
43*4882a593Smuzhiyun	|xref	fpsp_fmt_error
44*4882a593Smuzhiyun
|
| Exception dispatch table.  It is indexed at frame_com by the bfffo
| bit number (normalized to 0-8) of the highest-priority enabled and
| pending exception: bsun, snan, operr, ovfl, unfl, dz, inex2, inex1.
| The ninth entry (no_match) is taken when no enabled exception is
| pending.
|
45*4882a593Smuzhiyunexc_tbl:
46*4882a593Smuzhiyun	.long	bsun_exc
47*4882a593Smuzhiyun	.long	commonE1
48*4882a593Smuzhiyun	.long	commonE1
49*4882a593Smuzhiyun	.long	ovfl_unfl
50*4882a593Smuzhiyun	.long	ovfl_unfl
51*4882a593Smuzhiyun	.long	commonE1
52*4882a593Smuzhiyun	.long	commonE3
53*4882a593Smuzhiyun	.long	commonE3
54*4882a593Smuzhiyun	.long	no_match
55*4882a593Smuzhiyun
|
| gen_except entry point.  Dispatch on the fsave frame size byte at
| 1(%a7): an idle frame goes to do_check, either unimp frame variant
| ($28 orig / $30 rev) goes to unimp_x, a busy frame ($60) falls
| through to be patched up below, and any other size is a format
| error.
|
56*4882a593Smuzhiyun	.global	gen_except
57*4882a593Smuzhiyungen_except:
58*4882a593Smuzhiyun	cmpib	#IDLE_SIZE-4,1(%a7)	|test for idle frame
59*4882a593Smuzhiyun	beq	do_check		|go handle idle frame
60*4882a593Smuzhiyun	cmpib	#UNIMP_40_SIZE-4,1(%a7)	|test for orig unimp frame
61*4882a593Smuzhiyun	beqs	unimp_x			|go handle unimp frame
62*4882a593Smuzhiyun	cmpib	#UNIMP_41_SIZE-4,1(%a7)	|test for rev unimp frame
63*4882a593Smuzhiyun	beqs	unimp_x			|go handle unimp frame
64*4882a593Smuzhiyun	cmpib	#BUSY_SIZE-4,1(%a7)	|if size <> $60, fmt error
65*4882a593Smuzhiyun	bnel	fpsp_fmt_error
66*4882a593Smuzhiyun	leal	BUSY_SIZE+LOCAL_SIZE(%a7),%a1 |init a1 so fpsp.h
67*4882a593Smuzhiyun|					;equates will work
68*4882a593Smuzhiyun| Fix up the new busy frame with entries from the unimp frame
69*4882a593Smuzhiyun|
70*4882a593Smuzhiyun	movel	ETEMP_EX(%a6),ETEMP_EX(%a1) |copy etemp from unimp
71*4882a593Smuzhiyun	movel	ETEMP_HI(%a6),ETEMP_HI(%a1) |frame to busy frame
72*4882a593Smuzhiyun	movel	ETEMP_LO(%a6),ETEMP_LO(%a1)
73*4882a593Smuzhiyun	movel	CMDREG1B(%a6),CMDREG1B(%a1) |set inst in frame to unimp
|
| Rearrange the cmd1b register/opclass bits into the cmd3b layout:
| keep bits masked by 0x03c30000, then move bit 2 (field {13:1}) and
| bits 3-5 (field {10:3}) of the high word into their cmd3b positions.
| The same transformation is repeated at unsE3 and busy_fr.
|
74*4882a593Smuzhiyun	movel	CMDREG1B(%a6),%d0		|fix cmd1b to make it
75*4882a593Smuzhiyun	andl	#0x03c30000,%d0		|work for cmd3b
76*4882a593Smuzhiyun	bfextu	CMDREG1B(%a6){#13:#1},%d1	|extract bit 2
77*4882a593Smuzhiyun	lsll	#5,%d1
78*4882a593Smuzhiyun	swap	%d1
79*4882a593Smuzhiyun	orl	%d1,%d0			|put it in the right place
80*4882a593Smuzhiyun	bfextu	CMDREG1B(%a6){#10:#3},%d1	|extract bit 3,4,5
81*4882a593Smuzhiyun	lsll	#2,%d1
82*4882a593Smuzhiyun	swap	%d1
83*4882a593Smuzhiyun	orl	%d1,%d0			|put them in the right place
84*4882a593Smuzhiyun	movel	%d0,CMDREG3B(%a1)		|in the busy frame
85*4882a593Smuzhiyun|
86*4882a593Smuzhiyun| Or in the FPSR from the emulation with the USER_FPSR on the stack.
87*4882a593Smuzhiyun|
88*4882a593Smuzhiyun	fmovel	%FPSR,%d0
89*4882a593Smuzhiyun	orl	%d0,USER_FPSR(%a6)
90*4882a593Smuzhiyun	movel	USER_FPSR(%a6),FPSR_SHADOW(%a1) |set exc bits
91*4882a593Smuzhiyun	orl	#sx_mask,E_BYTE(%a1)
92*4882a593Smuzhiyun	bra	do_clean
93*4882a593Smuzhiyun
94*4882a593Smuzhiyun|
95*4882a593Smuzhiyun| Frame is an unimp frame possibly resulting from an fmove <ea>,fp0
96*4882a593Smuzhiyun| that caused an exception
97*4882a593Smuzhiyun|
98*4882a593Smuzhiyun| a1 is modified to point into the new frame allowing fpsp equates
99*4882a593Smuzhiyun| to be valid.
100*4882a593Smuzhiyun|
|
| unimp_x: determine which unimp frame variant is on the stack
| ($28 orig / $30 rev), point a1 past that frame plus the local
| work area so the fpsp.h equates address the new frame, then fix
| up the frame and merge the emulated FPSR into USER_FPSR.
|
101*4882a593Smuzhiyununimp_x:
102*4882a593Smuzhiyun	cmpib	#UNIMP_40_SIZE-4,1(%a7)	|test for orig unimp frame
103*4882a593Smuzhiyun	bnes	test_rev
104*4882a593Smuzhiyun	leal	UNIMP_40_SIZE+LOCAL_SIZE(%a7),%a1
105*4882a593Smuzhiyun	bras	unimp_con
106*4882a593Smuzhiyuntest_rev:
107*4882a593Smuzhiyun	cmpib	#UNIMP_41_SIZE-4,1(%a7)	|test for rev unimp frame
108*4882a593Smuzhiyun	bnel	fpsp_fmt_error		|if not $28 or $30
109*4882a593Smuzhiyun	leal	UNIMP_41_SIZE+LOCAL_SIZE(%a7),%a1
110*4882a593Smuzhiyun
111*4882a593Smuzhiyununimp_con:
112*4882a593Smuzhiyun|
113*4882a593Smuzhiyun| Fix up the new unimp frame with entries from the old unimp frame
114*4882a593Smuzhiyun|
115*4882a593Smuzhiyun	movel	CMDREG1B(%a6),CMDREG1B(%a1) |set inst in frame to unimp
116*4882a593Smuzhiyun|
117*4882a593Smuzhiyun| Or in the FPSR from the emulation with the USER_FPSR on the stack.
118*4882a593Smuzhiyun|
119*4882a593Smuzhiyun	fmovel	%FPSR,%d0
120*4882a593Smuzhiyun	orl	%d0,USER_FPSR(%a6)
121*4882a593Smuzhiyun	bra	do_clean
122*4882a593Smuzhiyun
123*4882a593Smuzhiyun|
124*4882a593Smuzhiyun| Frame is idle, so check for exceptions reported through
125*4882a593Smuzhiyun| USER_FPSR and set the unimp frame accordingly.
126*4882a593Smuzhiyun| A7 must be incremented by 4 so that the idle fsave frame
127*4882a593Smuzhiyun| lines up with the unimp frame layout.
128*4882a593Smuzhiyun|
129*4882a593Smuzhiyun
130*4882a593Smuzhiyundo_check:
131*4882a593Smuzhiyun	addl	#4,%a7			|point A7 back to unimp frame
132*4882a593Smuzhiyun|
133*4882a593Smuzhiyun| Or in the FPSR from the emulation with the USER_FPSR on the stack.
134*4882a593Smuzhiyun|
135*4882a593Smuzhiyun	fmovel	%FPSR,%d0
136*4882a593Smuzhiyun	orl	%d0,USER_FPSR(%a6)
137*4882a593Smuzhiyun|
138*4882a593Smuzhiyun| On a busy frame, we must clear the nmnexc bits.
139*4882a593Smuzhiyun|
140*4882a593Smuzhiyun	cmpib	#BUSY_SIZE-4,1(%a7)	|check frame type
141*4882a593Smuzhiyun	bnes	check_fr		|if busy, clr nmnexc
142*4882a593Smuzhiyun	clrw	NMNEXC(%a6)		|clr nmnexc & nmcexc
143*4882a593Smuzhiyun	btstb	#5,CMDREG1B(%a6)		|test for fmove out
144*4882a593Smuzhiyun	bnes	frame_com
145*4882a593Smuzhiyun	movel	USER_FPSR(%a6),FPSR_SHADOW(%a6) |set exc bits
146*4882a593Smuzhiyun	orl	#sx_mask,E_BYTE(%a6)
147*4882a593Smuzhiyun	bras	frame_com
148*4882a593Smuzhiyuncheck_fr:
149*4882a593Smuzhiyun	cmpb	#UNIMP_40_SIZE-4,1(%a7)
150*4882a593Smuzhiyun	beqs	frame_com
151*4882a593Smuzhiyun	clrw	NMNEXC(%a6)
|
| frame_com: AND the fpcr exception-enable byte with the fpsr
| exception-status byte; bfffo finds the highest-priority set bit
| (bit offset 24-32 within d0), which after subtracting 24 indexes
| exc_tbl (entry 8 = no_match when no bit is set).  Dispatch through
| the table.
|
152*4882a593Smuzhiyunframe_com:
153*4882a593Smuzhiyun	moveb	FPCR_ENABLE(%a6),%d0	|get fpcr enable byte
154*4882a593Smuzhiyun	andb	FPSR_EXCEPT(%a6),%d0	|and in the fpsr exc byte
155*4882a593Smuzhiyun	bfffo	%d0{#24:#8},%d1		|test for first set bit
156*4882a593Smuzhiyun	leal	exc_tbl,%a0		|load jmp table address
157*4882a593Smuzhiyun	subib	#24,%d1			|normalize bit offset to 0-8
158*4882a593Smuzhiyun	movel	(%a0,%d1.w*4),%a0		|load routine address based
159*4882a593Smuzhiyun|					;based on first enabled exc
160*4882a593Smuzhiyun	jmp	(%a0)			|jump to routine
161*4882a593Smuzhiyun|
162*4882a593Smuzhiyun| Bsun is not possible in unimp or unsupp
163*4882a593Smuzhiyun|
164*4882a593Smuzhiyunbsun_exc:
| Nothing to post for bsun here; just clean up the frame and exit.
165*4882a593Smuzhiyun	bra	do_clean
166*4882a593Smuzhiyun|
167*4882a593Smuzhiyun| The typical work to be done to the unimp frame to report an
168*4882a593Smuzhiyun| exception is to set the E1/E3 byte and clr the U flag.
169*4882a593Smuzhiyun| commonE1 does this for E1 exceptions, which are snan,
170*4882a593Smuzhiyun| operr, and dz.  commonE3 does this for E3 exceptions, which
171*4882a593Smuzhiyun| are inex2 and inex1, and also clears the E1 exception bit
172*4882a593Smuzhiyun| left over from the unimp exception.
173*4882a593Smuzhiyun|
174*4882a593SmuzhiyuncommonE1:
175*4882a593Smuzhiyun	bsetb	#E1,E_BYTE(%a6)		|set E1 flag
176*4882a593Smuzhiyun	bra	commonE			|go clean and exit
177*4882a593Smuzhiyun
| commonE3: E3-class exceptions.  UFLG_TMP selects between the
| unimplemented-instruction path (uniE3) and the unsupported-data-type
| path (unsE3).
178*4882a593SmuzhiyuncommonE3:
179*4882a593Smuzhiyun	tstb	UFLG_TMP(%a6)		|test flag for unsup/unimp state
180*4882a593Smuzhiyun	bnes	unsE3
181*4882a593SmuzhiyununiE3:
182*4882a593Smuzhiyun	bsetb	#E3,E_BYTE(%a6)		|set E3 flag
183*4882a593Smuzhiyun	bclrb	#E1,E_BYTE(%a6)		|clr E1 from unimp
184*4882a593Smuzhiyun	bra	commonE
185*4882a593Smuzhiyun
| unsE3: unsupported-data-type case.  Set E3 only when RES_FLG is
| clear, always clear E1, and rewrite cmd1b into cmd3b format
| (same bit rearrangement as in gen_except and busy_fr).
186*4882a593SmuzhiyununsE3:
187*4882a593Smuzhiyun	tstb	RES_FLG(%a6)
188*4882a593Smuzhiyun	bnes	unsE3_0
189*4882a593SmuzhiyununsE3_1:
190*4882a593Smuzhiyun	bsetb	#E3,E_BYTE(%a6)		|set E3 flag
191*4882a593SmuzhiyununsE3_0:
192*4882a593Smuzhiyun	bclrb	#E1,E_BYTE(%a6)		|clr E1 flag
193*4882a593Smuzhiyun	movel	CMDREG1B(%a6),%d0
194*4882a593Smuzhiyun	andl	#0x03c30000,%d0		|work for cmd3b
195*4882a593Smuzhiyun	bfextu	CMDREG1B(%a6){#13:#1},%d1	|extract bit 2
196*4882a593Smuzhiyun	lsll	#5,%d1
197*4882a593Smuzhiyun	swap	%d1
198*4882a593Smuzhiyun	orl	%d1,%d0			|put it in the right place
199*4882a593Smuzhiyun	bfextu	CMDREG1B(%a6){#10:#3},%d1	|extract bit 3,4,5
200*4882a593Smuzhiyun	lsll	#2,%d1
201*4882a593Smuzhiyun	swap	%d1
202*4882a593Smuzhiyun	orl	%d1,%d0			|put them in the right place
203*4882a593Smuzhiyun	movel	%d0,CMDREG3B(%a6)		|in the busy frame
204*4882a593Smuzhiyun
205*4882a593SmuzhiyuncommonE:
206*4882a593Smuzhiyun	bclrb	#UFLAG,T_BYTE(%a6)	|clr U flag from unimp
207*4882a593Smuzhiyun	bra	do_clean		|go clean and exit
208*4882a593Smuzhiyun|
209*4882a593Smuzhiyun| No bits in the enable byte match existing exceptions.  Check for
210*4882a593Smuzhiyun| the case of the ovfl exc without the ovfl enabled, but with
211*4882a593Smuzhiyun| inex2 enabled.
212*4882a593Smuzhiyun|
| no_match: no enabled exception matched a status bit.  Per the IEEE
| note in the file header, an ovfl status with ovfl disabled but
| inex2 enabled must still be reported, via ovfl_unfl.
213*4882a593Smuzhiyunno_match:
214*4882a593Smuzhiyun	btstb	#inex2_bit,FPCR_ENABLE(%a6) |check for ovfl/inex2 case
215*4882a593Smuzhiyun	beqs	no_exc			|if clear, exit
216*4882a593Smuzhiyun	btstb	#ovfl_bit,FPSR_EXCEPT(%a6) |now check ovfl
217*4882a593Smuzhiyun	beqs	no_exc			|if clear, exit
218*4882a593Smuzhiyun	bras	ovfl_unfl		|go to unfl_ovfl to determine if
219*4882a593Smuzhiyun|					;it is an unsupp or unimp exc
220*4882a593Smuzhiyun
221*4882a593Smuzhiyun| No exceptions are to be reported.  If the instruction was
222*4882a593Smuzhiyun| unimplemented, no FPU restore is necessary.  If it was
223*4882a593Smuzhiyun| unsupported, we must perform the restore.
224*4882a593Smuzhiyunno_exc:
225*4882a593Smuzhiyun	tstb	UFLG_TMP(%a6)	|test flag for unsupp/unimp state
226*4882a593Smuzhiyun	beqs	uni_no_exc
227*4882a593Smuzhiyununs_no_exc:
228*4882a593Smuzhiyun	tstb	RES_FLG(%a6)	|check if frestore is needed
229*4882a593Smuzhiyun	bne	do_clean	|if clear, no frestore needed
| uni_no_exc: restore user registers and FPU state, drop the local
| frame, and go check for trace-mode exit.
230*4882a593Smuzhiyununi_no_exc:
231*4882a593Smuzhiyun	moveml	USER_DA(%a6),%d0-%d1/%a0-%a1
232*4882a593Smuzhiyun	fmovemx USER_FP0(%a6),%fp0-%fp3
233*4882a593Smuzhiyun	fmoveml USER_FPCR(%a6),%fpcr/%fpsr/%fpiar
234*4882a593Smuzhiyun	unlk	%a6
235*4882a593Smuzhiyun	bra	finish_up
236*4882a593Smuzhiyun|
237*4882a593Smuzhiyun| Unsupported Data Type Handler:
238*4882a593Smuzhiyun| Ovfl:
239*4882a593Smuzhiyun|   An fmoveout that results in an overflow is reported this way.
240*4882a593Smuzhiyun| Unfl:
241*4882a593Smuzhiyun|   An fmoveout that results in an underflow is reported this way.
242*4882a593Smuzhiyun|
243*4882a593Smuzhiyun| Unimplemented Instruction Handler:
244*4882a593Smuzhiyun| Ovfl:
245*4882a593Smuzhiyun|   Only scosh, setox, ssinh, stwotox, and scale can set overflow in
246*4882a593Smuzhiyun|   this manner.
247*4882a593Smuzhiyun| Unfl:
248*4882a593Smuzhiyun|   Stwotox, setox, and scale can set underflow in this manner.
249*4882a593Smuzhiyun|   Any of the other Library Routines such that f(x)=x in which
250*4882a593Smuzhiyun|   x is an extended denorm can report an underflow exception.
251*4882a593Smuzhiyun|   It is the responsibility of the exception-causing routine
252*4882a593Smuzhiyun|   to make sure that WBTEMP is correct.
253*4882a593Smuzhiyun|
254*4882a593Smuzhiyun|   The exceptional operand is in FP_SCR1.
255*4882a593Smuzhiyun|
| ovfl_unfl: report an overflow/underflow exception.  UFLG_TMP clear
| means the unimplemented-instruction path (ofuf_con); set means the
| unsupported-data-type path below.
256*4882a593Smuzhiyunovfl_unfl:
257*4882a593Smuzhiyun	tstb	UFLG_TMP(%a6)	|test flag for unsupp/unimp state
258*4882a593Smuzhiyun	beqs	ofuf_con
259*4882a593Smuzhiyun|
260*4882a593Smuzhiyun| The caller was from an unsupported data type trap.  Test if the
261*4882a593Smuzhiyun| caller set CU_ONLY.  If so, the exceptional operand is expected in
262*4882a593Smuzhiyun| FPTEMP, rather than WBTEMP.
263*4882a593Smuzhiyun|
264*4882a593Smuzhiyun	tstb	CU_ONLY(%a6)		|test if inst is cu-only
265*4882a593Smuzhiyun	beq	unsE3
266*4882a593Smuzhiyun|	move.w	#$fe,CU_SAVEPC(%a6)
267*4882a593Smuzhiyun	clrb	CU_SAVEPC(%a6)
268*4882a593Smuzhiyun	bsetb	#E1,E_BYTE(%a6)		|set E1 exception flag
| Copy the exceptional operand from etemp into fptemp (word + 2 longs).
269*4882a593Smuzhiyun	movew	ETEMP_EX(%a6),FPTEMP_EX(%a6)
270*4882a593Smuzhiyun	movel	ETEMP_HI(%a6),FPTEMP_HI(%a6)
271*4882a593Smuzhiyun	movel	ETEMP_LO(%a6),FPTEMP_LO(%a6)
272*4882a593Smuzhiyun	bsetb	#fptemp15_bit,DTAG(%a6)	|set fpte15
273*4882a593Smuzhiyun	bclrb	#UFLAG,T_BYTE(%a6)	|clr U flag from unimp
274*4882a593Smuzhiyun	bra	do_clean		|go clean and exit
275*4882a593Smuzhiyun
| ofuf_con: if the frame is not already busy, grow the unimp frame
| on the stack into a zero-filled busy frame (14 lwords for the $40
| version, 12 for $41), preserving the version byte and writing the
| busy format word.
276*4882a593Smuzhiyunofuf_con:
277*4882a593Smuzhiyun	moveb	(%a7),VER_TMP(%a6)	|save version number
278*4882a593Smuzhiyun	cmpib	#BUSY_SIZE-4,1(%a7)	|check for busy frame
279*4882a593Smuzhiyun	beqs	busy_fr			|if unimp, grow to busy
280*4882a593Smuzhiyun	cmpib	#VER_40,(%a7)		|test for orig unimp frame
281*4882a593Smuzhiyun	bnes	try_41			|if not, test for rev frame
282*4882a593Smuzhiyun	moveql	#13,%d0			|need to zero 14 lwords
283*4882a593Smuzhiyun	bras	ofuf_fin
284*4882a593Smuzhiyuntry_41:
285*4882a593Smuzhiyun	cmpib	#VER_41,(%a7)		|test for rev unimp frame
286*4882a593Smuzhiyun	bnel	fpsp_fmt_error		|if neither, exit with error
287*4882a593Smuzhiyun	moveql	#11,%d0			|need to zero 12 lwords
288*4882a593Smuzhiyun
289*4882a593Smuzhiyunofuf_fin:
290*4882a593Smuzhiyun	clrl	(%a7)
291*4882a593Smuzhiyunloop1:
292*4882a593Smuzhiyun	clrl	-(%a7)			|clear and dec a7
293*4882a593Smuzhiyun	dbra	%d0,loop1
294*4882a593Smuzhiyun	moveb	VER_TMP(%a6),(%a7)
295*4882a593Smuzhiyun	moveb	#BUSY_SIZE-4,1(%a7)		|write busy fmt word.
| busy_fr: fill in the busy frame: exceptional operand from FP_SCR1,
| E3 set / E1 clear / U clear, FPSR shadow + sx_mask, and cmd1b
| rewritten into cmd3b format (same rearrangement as in gen_except).
296*4882a593Smuzhiyunbusy_fr:
297*4882a593Smuzhiyun	movel	FP_SCR1(%a6),WBTEMP_EX(%a6)	|write
298*4882a593Smuzhiyun	movel	FP_SCR1+4(%a6),WBTEMP_HI(%a6)	|exceptional op to
299*4882a593Smuzhiyun	movel	FP_SCR1+8(%a6),WBTEMP_LO(%a6)	|wbtemp
300*4882a593Smuzhiyun	bsetb	#E3,E_BYTE(%a6)			|set E3 flag
301*4882a593Smuzhiyun	bclrb	#E1,E_BYTE(%a6)			|make sure E1 is clear
302*4882a593Smuzhiyun	bclrb	#UFLAG,T_BYTE(%a6)		|clr U flag
303*4882a593Smuzhiyun	movel	USER_FPSR(%a6),FPSR_SHADOW(%a6)
304*4882a593Smuzhiyun	orl	#sx_mask,E_BYTE(%a6)
305*4882a593Smuzhiyun	movel	CMDREG1B(%a6),%d0		|fix cmd1b to make it
306*4882a593Smuzhiyun	andl	#0x03c30000,%d0		|work for cmd3b
307*4882a593Smuzhiyun	bfextu	CMDREG1B(%a6){#13:#1},%d1	|extract bit 2
308*4882a593Smuzhiyun	lsll	#5,%d1
309*4882a593Smuzhiyun	swap	%d1
310*4882a593Smuzhiyun	orl	%d1,%d0			|put it in the right place
311*4882a593Smuzhiyun	bfextu	CMDREG1B(%a6){#10:#3},%d1	|extract bit 3,4,5
312*4882a593Smuzhiyun	lsll	#2,%d1
313*4882a593Smuzhiyun	swap	%d1
314*4882a593Smuzhiyun	orl	%d1,%d0			|put them in the right place
315*4882a593Smuzhiyun	movel	%d0,CMDREG3B(%a6)		|in the busy frame
316*4882a593Smuzhiyun
317*4882a593Smuzhiyun|
318*4882a593Smuzhiyun| Check if the frame to be restored is busy or unimp.
319*4882a593Smuzhiyun|** NOTE *** Bug fix for errata (0d43b #3)
320*4882a593Smuzhiyun| If the frame is unimp, we must create a busy frame to
321*4882a593Smuzhiyun| fix the bug with the nmnexc bits in cases in which they
322*4882a593Smuzhiyun| are set by a previous instruction and not cleared by
323*4882a593Smuzhiyun| the save. The frame will be unimp only if the final
324*4882a593Smuzhiyun| instruction in an emulation routine caused the exception
325*4882a593Smuzhiyun| by doing an fmove <ea>,fp0.  The exception operand, in
326*4882a593Smuzhiyun| internal format, is in fptemp.
327*4882a593Smuzhiyun|
| do_clean: errata workaround (see comment above).  If the frame to
| restore is an unimp frame, rebuild it as a zero-filled busy frame
| (14 lwords for $28 orig, 12 for $30 rev); a busy frame is restored
| as-is via do_restore.
328*4882a593Smuzhiyundo_clean:
329*4882a593Smuzhiyun	cmpib	#UNIMP_40_SIZE-4,1(%a7)
330*4882a593Smuzhiyun	bnes	do_con
331*4882a593Smuzhiyun	moveql	#13,%d0			|in orig, need to zero 14 lwords
332*4882a593Smuzhiyun	bras	do_build
333*4882a593Smuzhiyundo_con:
334*4882a593Smuzhiyun	cmpib	#UNIMP_41_SIZE-4,1(%a7)
335*4882a593Smuzhiyun	bnes	do_restore		|frame must be busy
336*4882a593Smuzhiyun	moveql	#11,%d0			|in rev, need to zero 12 lwords
337*4882a593Smuzhiyun
338*4882a593Smuzhiyundo_build:
339*4882a593Smuzhiyun	moveb	(%a7),VER_TMP(%a6)
340*4882a593Smuzhiyun	clrl	(%a7)
341*4882a593Smuzhiyunloop2:
342*4882a593Smuzhiyun	clrl	-(%a7)			|clear and dec a7
343*4882a593Smuzhiyun	dbra	%d0,loop2
344*4882a593Smuzhiyun|
345*4882a593Smuzhiyun| Use a1 as pointer into new frame.  a6 is not correct if an unimp or
346*4882a593Smuzhiyun| busy frame was created as the result of an exception on the final
347*4882a593Smuzhiyun| instruction of an emulation routine.
348*4882a593Smuzhiyun|
349*4882a593Smuzhiyun| We need to set the nmcexc bits if the exception is E1. Otherwise,
350*4882a593Smuzhiyun| the exc taken will be inex2.
351*4882a593Smuzhiyun|
352*4882a593Smuzhiyun	leal	BUSY_SIZE+LOCAL_SIZE(%a7),%a1	|init a1 for new frame
353*4882a593Smuzhiyun	moveb	VER_TMP(%a6),(%a7)	|write busy fmt word
354*4882a593Smuzhiyun	moveb	#BUSY_SIZE-4,1(%a7)
355*4882a593Smuzhiyun	movel	FP_SCR1(%a6),WBTEMP_EX(%a1)	|write
356*4882a593Smuzhiyun	movel	FP_SCR1+4(%a6),WBTEMP_HI(%a1)	|exceptional op to
357*4882a593Smuzhiyun	movel	FP_SCR1+8(%a6),WBTEMP_LO(%a1)	|wbtemp
358*4882a593Smuzhiyun|	btst.b	#E1,E_BYTE(%a1)
359*4882a593Smuzhiyun|	beq.b	do_restore
360*4882a593Smuzhiyun	bfextu	USER_FPSR(%a6){#17:#4},%d0	|get snan/operr/ovfl/unfl bits
361*4882a593Smuzhiyun	bfins	%d0,NMCEXC(%a1){#4:#4}	|and insert them in nmcexc
362*4882a593Smuzhiyun	movel	USER_FPSR(%a6),FPSR_SHADOW(%a1) |set exc bits
363*4882a593Smuzhiyun	orl	#sx_mask,E_BYTE(%a1)
364*4882a593Smuzhiyun
| do_restore: restore user data/address regs, fp regs and control
| regs, then frestore the (possibly rebuilt) frame.  If RES_FLG is
| set, apply the bug1384 hardware-errata workaround before exiting.
365*4882a593Smuzhiyundo_restore:
366*4882a593Smuzhiyun	moveml	USER_DA(%a6),%d0-%d1/%a0-%a1
367*4882a593Smuzhiyun	fmovemx USER_FP0(%a6),%fp0-%fp3
368*4882a593Smuzhiyun	fmoveml USER_FPCR(%a6),%fpcr/%fpsr/%fpiar
369*4882a593Smuzhiyun	frestore (%a7)+
370*4882a593Smuzhiyun	tstb	RES_FLG(%a6)	|RES_FLG indicates a "continuation" frame
371*4882a593Smuzhiyun	beq	cont
372*4882a593Smuzhiyun	bsr	bug1384
373*4882a593Smuzhiyuncont:
374*4882a593Smuzhiyun	unlk	%a6
375*4882a593Smuzhiyun|
376*4882a593Smuzhiyun| If trace mode enabled, then go to trace handler.  This handler
377*4882a593Smuzhiyun| cannot have any fp instructions.  If there are fp inst's and an
378*4882a593Smuzhiyun| exception has been restored into the machine then the exception
379*4882a593Smuzhiyun| will occur upon execution of the fp inst.  This is not desirable
380*4882a593Smuzhiyun| in the kernel (supervisor mode).  See MC68040 manual Section 9.3.8.
381*4882a593Smuzhiyun|
382*4882a593Smuzhiyunfinish_up:
383*4882a593Smuzhiyun	btstb	#7,(%a7)		|test T1 in SR
384*4882a593Smuzhiyun	bnes	g_trace
385*4882a593Smuzhiyun	btstb	#6,(%a7)		|test T0 in SR
386*4882a593Smuzhiyun	bnes	g_trace
387*4882a593Smuzhiyun	bral	fpsp_done
388*4882a593Smuzhiyun|
389*4882a593Smuzhiyun| Change integer stack to look like trace stack
390*4882a593Smuzhiyun| The address of the instruction that caused the
391*4882a593Smuzhiyun| exception is already in the integer stack (is
392*4882a593Smuzhiyun| the same as the saved fpiar)
393*4882a593Smuzhiyun|
394*4882a593Smuzhiyun| If the current frame is already a 6-word stack then all
395*4882a593Smuzhiyun| that needs to be done is to change the vector# to TRACE.
396*4882a593Smuzhiyun| If the frame is only a 4-word stack (meaning we got here
397*4882a593Smuzhiyun| on an Unsupported data type exception), then we need to grow
398*4882a593Smuzhiyun| the stack an extra 2 words and get the FPIAR from the FPU.
399*4882a593Smuzhiyun|
400*4882a593Smuzhiyung_trace:
401*4882a593Smuzhiyun	bftst	EXC_VEC-4(%sp){#0:#4}
402*4882a593Smuzhiyun	bne	g_easy
403*4882a593Smuzhiyun
404*4882a593Smuzhiyun	subw	#4,%sp		| make room
405*4882a593Smuzhiyun	movel	4(%sp),(%sp)
406*4882a593Smuzhiyun	movel	8(%sp),4(%sp)
| fsave/frestore pair lets us read the fpiar without disturbing the
| restored FPU exception state.
407*4882a593Smuzhiyun	subw	#BUSY_SIZE,%sp
408*4882a593Smuzhiyun	fsave	(%sp)
409*4882a593Smuzhiyun	fmovel	%fpiar,BUSY_SIZE+EXC_EA-4(%sp)
410*4882a593Smuzhiyun	frestore (%sp)
411*4882a593Smuzhiyun	addw	#BUSY_SIZE,%sp
412*4882a593Smuzhiyun
413*4882a593Smuzhiyung_easy:
414*4882a593Smuzhiyun	movew	#TRACE_VEC,EXC_VEC-4(%a7)
415*4882a593Smuzhiyun	bral	real_trace
416*4882a593Smuzhiyun|
417*4882a593Smuzhiyun|  This is a work-around for hardware bug 1384.
418*4882a593Smuzhiyun|
| bug1384: hardware-errata workaround (see comment above).  fsave the
| current state; only mask version $40/$41 frames are fixed (later
| masks fall through to nofix).  Idle frames are replaced with a
| freshly zeroed unimp frame of the matching version; non-idle frames
| just get etemp bit 15 cleared in the saved state.
419*4882a593Smuzhiyunbug1384:
420*4882a593Smuzhiyun	link	%a5,#0
421*4882a593Smuzhiyun	fsave	-(%sp)
422*4882a593Smuzhiyun	cmpib	#0x41,(%sp)	| check for correct frame
423*4882a593Smuzhiyun	beq	frame_41
424*4882a593Smuzhiyun	bgt	nofix		| if more advanced mask, do nada
425*4882a593Smuzhiyun
426*4882a593Smuzhiyunframe_40:
427*4882a593Smuzhiyun	tstb	1(%sp)		| check to see if idle
428*4882a593Smuzhiyun	bne	notidle
429*4882a593Smuzhiyunidle40:
430*4882a593Smuzhiyun	clrl	(%sp)		| get rid of old fsave frame
431*4882a593Smuzhiyun        movel  %d1,USER_D1(%a6)  | save d1
432*4882a593Smuzhiyun	movew	#8,%d1		| place unimp frame instead
433*4882a593Smuzhiyunloop40:	clrl	-(%sp)
434*4882a593Smuzhiyun	dbra	%d1,loop40
435*4882a593Smuzhiyun        movel  USER_D1(%a6),%d1  | restore d1
436*4882a593Smuzhiyun	movel	#0x40280000,-(%sp)
437*4882a593Smuzhiyun	frestore (%sp)+
438*4882a593Smuzhiyun	unlk	%a5
439*4882a593Smuzhiyun	rts
440*4882a593Smuzhiyun
441*4882a593Smuzhiyunframe_41:
442*4882a593Smuzhiyun	tstb	1(%sp)		| check to see if idle
443*4882a593Smuzhiyun	bne	notidle
444*4882a593Smuzhiyunidle41:
445*4882a593Smuzhiyun	clrl	(%sp)		| get rid of old fsave frame
446*4882a593Smuzhiyun        movel  %d1,USER_D1(%a6)  | save d1
447*4882a593Smuzhiyun	movew	#10,%d1		| place unimp frame instead
448*4882a593Smuzhiyunloop41:	clrl	-(%sp)
449*4882a593Smuzhiyun	dbra	%d1,loop41
450*4882a593Smuzhiyun        movel  USER_D1(%a6),%d1  | restore d1
451*4882a593Smuzhiyun	movel	#0x41300000,-(%sp)
452*4882a593Smuzhiyun	frestore (%sp)+
453*4882a593Smuzhiyun	unlk	%a5
454*4882a593Smuzhiyun	rts
455*4882a593Smuzhiyun
456*4882a593Smuzhiyunnotidle:
457*4882a593Smuzhiyun	bclrb	#etemp15_bit,-40(%a5)
458*4882a593Smuzhiyun	frestore (%sp)+
459*4882a593Smuzhiyun	unlk	%a5
460*4882a593Smuzhiyun	rts
461*4882a593Smuzhiyun
462*4882a593Smuzhiyunnofix:
463*4882a593Smuzhiyun	frestore (%sp)+
464*4882a593Smuzhiyun	unlk	%a5
465*4882a593Smuzhiyun	rts
466*4882a593Smuzhiyun
467*4882a593Smuzhiyun	|end
468