/*
 * Copyright (C) 2013-2015 Synopsys, Inc. All rights reserved.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <linux/linkage.h>

/*
 * Note on the LD/ST addressing modes with address register write-back
 *
 * LD.a same as LD.aw
 *
 * LD.a    reg1, [reg2, x]  => Pre Incr
 *      Eff Addr for load = [reg2 + x]
 *
 * LD.ab   reg1, [reg2, x]  => Post Incr
 *      Eff Addr for load = [reg2]
 */
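
/*
 * The same write-back suffixes apply to ST, so "st.a reg, [%sp, -4]"
 * first decrements %sp by 4 and then stores reg at the new %sp, i.e.
 * it behaves as a classic stack push. The PUSH macro below relies on
 * exactly this behaviour.
 */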

.macro PUSH reg
	st.a	\reg, [%sp, -4]
.endm

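/*
 * PUSHAX saves an auxiliary register: the aux reg is read into %r9
 * (used here as a scratch register, so callers must have saved %r9
 * already - SAVE_ALL_SYS does so via SAVE_R1_TO_R24) and then pushed
 * onto the stack.
 */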
.macro PUSHAX aux
	lr	%r9, [\aux]
	PUSH	%r9
.endm

.macro SAVE_R1_TO_R24
	PUSH	%r1
	PUSH	%r2
	PUSH	%r3
	PUSH	%r4
	PUSH	%r5
	PUSH	%r6
	PUSH	%r7
	PUSH	%r8
	PUSH	%r9
	PUSH	%r10
	PUSH	%r11
	PUSH	%r12
	PUSH	%r13
	PUSH	%r14
	PUSH	%r15
	PUSH	%r16
	PUSH	%r17
	PUSH	%r18
	PUSH	%r19
	PUSH	%r20
	PUSH	%r21
	PUSH	%r22
	PUSH	%r23
	PUSH	%r24
.endm

.macro SAVE_ALL_SYS
	/* save %r0 to regs->r0 in advance since we read %ecr into it */
	st	%r0, [%sp, -8]
	lr	%r0, [%ecr]	/* all stack addressing is manual so far */
	st	%r0, [%sp]
	st	%sp, [%sp, -4]
	/* now move %sp to the regs->r0 slot so we can do "push" automatically */
	sub	%sp, %sp, 8

	SAVE_R1_TO_R24
	PUSH	%r25
	PUSH	%gp
	PUSH	%fp
	PUSH	%blink
	PUSHAX	%eret
	PUSHAX	%erstatus
	PUSH	%lp_count
	PUSHAX	%lp_end
	PUSHAX	%lp_start
	PUSHAX	%erbta
.endm
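
/*
 * After SAVE_ALL_SYS, %sp points at the base of the saved register
 * frame (ERBTA at the lowest address, ECR and the original %sp at the
 * highest), which is the layout the C-level do_* handlers interpret
 * as the saved register set (struct pt_regs).
 */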

.macro SAVE_EXCEPTION_SOURCE
#ifdef CONFIG_MMU
	/* If an MMU exists, the exception faulting address is loaded into the EFA reg */
	lr	%r0, [%efa]
#else
	/* Otherwise it is in the ERET (exception return address) reg */
	lr	%r0, [%eret]
#endif
.endm

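/*
 * The exception entries below build the register frame with
 * SAVE_ALL_SYS and then jump straight to their C counterparts.
 * Handlers that need the faulting address get it in %r0 (via
 * SAVE_EXCEPTION_SOURCE) and the saved register frame in %r1;
 * the remaining handlers get the frame in %r0.
 */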
ENTRY(memory_error)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp
	j	do_memory_error
ENDPROC(memory_error)

ENTRY(instruction_error)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp
	j	do_instruction_error
ENDPROC(instruction_error)

ENTRY(interrupt_handler)
	/* TODO: save and restore the CPU context once interrupts are actually used */
	bl	do_interrupt_handler
	rtie
ENDPROC(interrupt_handler)

ENTRY(EV_MachineCheck)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp
	j	do_machine_check_fault
ENDPROC(EV_MachineCheck)

ENTRY(EV_TLBMissI)
	SAVE_ALL_SYS
	mov	%r0, %sp
	j	do_itlb_miss
ENDPROC(EV_TLBMissI)

ENTRY(EV_TLBMissD)
	SAVE_ALL_SYS
	mov	%r0, %sp
	j	do_dtlb_miss
ENDPROC(EV_TLBMissD)

ENTRY(EV_TLBProtV)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp
	j	do_tlb_prot_violation
ENDPROC(EV_TLBProtV)

ENTRY(EV_PrivilegeV)
	SAVE_ALL_SYS
	mov	%r0, %sp
	j	do_privilege_violation
ENDPROC(EV_PrivilegeV)

ENTRY(EV_Trap)
	SAVE_ALL_SYS
	mov	%r0, %sp
	j	do_trap
ENDPROC(EV_Trap)

ENTRY(EV_Extension)
	SAVE_ALL_SYS
	mov	%r0, %sp
	j	do_extension
ENDPROC(EV_Extension)

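/* The following vectors are only present when building for ARCv2 (ARC HS) cores */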
#ifdef CONFIG_ISA_ARCV2
ENTRY(EV_SWI)
	SAVE_ALL_SYS
	mov	%r0, %sp
	j	do_swi
ENDPROC(EV_SWI)

ENTRY(EV_DivZero)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp
	j	do_divzero
ENDPROC(EV_DivZero)

ENTRY(EV_DCError)
	SAVE_ALL_SYS
	mov	%r0, %sp
	j	do_dcerror
ENDPROC(EV_DCError)

ENTRY(EV_Maligned)
	SAVE_ALL_SYS
	SAVE_EXCEPTION_SOURCE
	mov	%r1, %sp
	j	do_maligned
ENDPROC(EV_Maligned)
#endif