/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert_macros.S>
#include <asm_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <lib/extensions/ras_arch.h>
#include <cpu_macros.S>

	.globl	handle_lower_el_ea_esb
	.globl	handle_lower_el_async_ea
	.globl	enter_lower_el_sync_ea
	.globl	enter_lower_el_async_ea

/*
 * Function to delegate External Aborts synchronized by the ESB instruction at
 * EL3 vector entry. This function assumes GP registers x0-x29 have been saved
 * and are available for use. It delegates the handling of the EA to the
 * platform handler, and returns only upon successfully handling the EA;
 * otherwise it panics. On return from this function, the original exception
 * handler is expected to resume.
 */
func handle_lower_el_ea_esb
	mov	x0, #ERROR_EA_ESB
	mrs	x1, DISR_EL1
	b	ea_proceed
endfunc handle_lower_el_ea_esb
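
/*
 * For orientation only: a hypothetical call site (the real vector code lives
 * elsewhere in BL31), assuming x0-x29 were already saved to the context:
 *
 *	esb				; synchronize pending EAs into DISR_EL1
 *	bl	handle_lower_el_ea_esb	; delegate to platform, or panic
 *
 * The esb instruction defers the syndrome of a pending error into DISR_EL1,
 * which is what this function forwards to ea_proceed.
 */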

/*
 * This function forms the tail end of Synchronous Exception entry from a
 * lower EL, and expects to handle Synchronous External Aborts from a lower EL
 * and CPU Implementation Defined Exceptions. If any other kind of exception
 * is detected, this function reports an unhandled exception.
 *
 * Since it is part of the exception vector, this function does not expect any
 * GP registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_sync_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
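	/*
	 * x30 now holds the Exception Class, i.e. ESR_EL3[31:26]. The values
	 * compared below are the architectural EC encodings for Instruction
	 * and Data Aborts taken from a lower EL (0x20 and 0x24 respectively).
	 */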

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/* Save GP registers */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Get the cpu_ops pointer */
	bl	get_cpu_ops_ptr

	/* Get the cpu_ops exception handler */
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/*
	 * If this (reserved) function pointer is NULL, the CPU does not
	 * provide an implementation defined exception handler.
	 */
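	/*
	 * Note: when present, the handler is entered via the blr below with
	 * the exception class in x1; x0 at that point still holds the
	 * handler's own address.
	 */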
	cbz	x0, 2f
	mrs	x1, esr_el3
	ubfx	x1, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	blr	x0
	b	2f

1:
	/* Test for EA bit in the instruction syndrome */
	mrs	x30, esr_el3
	tbz	x30, #ESR_ISS_EABORT_EA_BIT, 3f

	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * If the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable the Cycle
	 * Counter. Also set PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

3:
	/* Any other synchronous exception is unhandled: report it and panic */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception
endfunc enter_lower_el_sync_ea

/*
 * This function handles SErrors from lower ELs.
 *
 * Since it is part of the exception vector, this function does not expect any
 * GP registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_async_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

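/*
 * Vector code that has already stashed x30 in the context may branch directly
 * to this label, skipping the save above.
 */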
handle_lower_el_async_ea:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * If the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable the Cycle
	 * Counter. Also set PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc enter_lower_el_async_ea

/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if RAS_EXTENSION
	/*
	 * Check for the Uncontainable error type. If so, route to the
	 * platform fatal error handler rather than the generic EA one.
	 */
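	/*
	 * For reference: in the synchronous abort ISS encoding, the SET field
	 * occupies bits [12:11] and the fault status code (DFSC) bits [5:0];
	 * SYNC_EA_FSC is the DFSC encoding for a synchronous external abort
	 * not on a translation table walk.
	 */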
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check fault status code */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_sync_ea
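
/*
 * A sketch of the C-level shape of the uncontainable-error hook used above.
 * This is an assumption for illustration, not the authoritative declaration
 * (see TF-A's platform API); the handler must not return, matching no_ret:
 *
 *	void __dead2 plat_handle_uncontainable_ea(void);
 */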

/*
 * Prelude for Asynchronous External Abort handling. This function assumes
 * that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if RAS_EXTENSION
	/*
	 * Check for an Implementation Defined Syndrome. If so, skip checking
	 * for the Uncontainable error type, as the syndrome format is unknown.
	 */
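	/*
	 * IDS is bit [24] of the SError ISS; when set, bits [23:0] have an
	 * implementation defined layout, so the AET/DFSC checks below would
	 * not be meaningful.
	 */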
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/*
	 * Check for the Uncontainable error type. If so, route to the
	 * platform fatal error handler rather than the generic EA one.
	 */
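	/*
	 * For reference: in the SError ISS encoding, AET occupies bits
	 * [12:10] and DFSC bits [5:0]; DFSC_SERROR is the status code that
	 * identifies an SError interrupt.
	 */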
	ubfx	x2, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x2, #ERROR_STATUS_UET_UC
	b.ne	1f

	/* Check DFSC for SError type */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #DFSC_SERROR
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea

/*
 * Delegate External Abort handling to the platform's EA handler. This
 * function assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If the ESR stashed in the context is not zero, we were already
	 * processing an EA, and this is a double fault.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR as handling might involve lower ELs, and returning back to
	 * EL3 from there would trample the original ESR.
	 */
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
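	/*
	 * Note: the two stp instructions above rely on {SPSR_EL3, ELR_EL3}
	 * and {SCR_EL3, ESR_EL3} occupying consecutive slots in the EL3
	 * state area of the context (per the layout in context.h).
	 */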

	/*
	 * Set up the rest of the arguments, and call the platform External
	 * Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place)
	 * x2: Cookie (unused for now)
	 * x3: Context pointer
	 * x4: Flags (security state from SCR_EL3 for now)
	 */
	mov	x2, xzr
	mov	x3, sp
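	/* Extract SCR_EL3.NS (bit [0]) as the security-state flag */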
	ubfx	x4, x4, #0, #1
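
	/*
	 * The platform handler invoked below is expected to match the C
	 * prototype from TF-A's platform API:
	 *
	 * void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
	 *		void *cookie, void *handle, uint64_t flags);
	 */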

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning
	 * from the platform error handler, validate that we have completely
	 * unwound the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Restore EL3 state */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore SCR_EL3 and ESR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

#if ENABLE_ASSERTIONS
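	/* An EA was in flight, so the ESR we preserved must be nonzero */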
	cmp	x4, xzr
	ASM_ASSERT(ne)
#endif

	/* Clear ESR storage */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]

	ret	x29
endfunc ea_proceed