xref: /rk3399_ARM-atf/bl31/aarch64/ea_delegate.S (revision f87e54f73cfee5042df526af6185ac6d9653a8f5)
/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert_macros.S>
#include <asm_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <cpu_macros.S>
#include <lib/extensions/ras_arch.h>

	.globl	handle_lower_el_sync_ea
	.globl	handle_lower_el_async_ea
	.globl	handle_pending_async_ea
	.globl	reflect_pending_async_ea_to_lower_el

/*
 * This function forms the tail end of Synchronous Exception entry from a
 * lower EL. It expects to handle Synchronous External Aborts from a lower EL
 * and CPU Implementation Defined Exceptions. If any other kind of exception
 * is detected, it reports an unhandled exception.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_sync_ea
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f
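
	/*
	 * All other synchronous exceptions from a lower EL are offered to the
	 * CPU's implementation-defined exception handler (if any) below,
	 * before being reported as unhandled.
	 */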

	/* Save GP registers */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Get the cpu_ops pointer */
	bl	get_cpu_ops_ptr

	/* Get the cpu_ops exception handler */
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/*
	 * If the reserved function pointer is NULL, this CPU does not have an
	 * implementation-defined exception handler function.
	 */
	cbz	x0, 2f
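	/*
	 * Pass the Exception Class to the handler as its argument in x1;
	 * x0 still holds the handler address for the indirect call below.
	 */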
	mrs	x1, esr_el3
	ubfx	x1, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	blr	x0
	b	2f

1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Synchronous exceptions other than the above are assumed to be EA */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception
endfunc handle_lower_el_sync_ea


/*
 * This function handles SErrors from lower ELs.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_async_ea

	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_async_ea

/*
 * NOTE 1 : Synchronized async EA handling
 *
 * The comment here applies to the following two functions:
 *   - handle_pending_async_ea
 *   - reflect_pending_async_ea_to_lower_el
 *
 * Both must be called from the exception vector directly.
 *
 * This special handling is required to cater for async EAs from a lower EL
 * that are synchronized at EL3 entry.
 *
 * This scenario may arise when there is an error (EA) in the system which is
 * not yet signaled to the PE while executing in a lower EL. During entry into
 * EL3, the errors are synchronized either implicitly or explicitly, causing
 * an async EA to pend at EL3.
 *
 * On detecting the pending EA (via ISR_EL1.A), and based on the routing model
 * of the EA, it is either handled in EL3 using "handle_pending_async_ea"
 * (FFH) or returned to the lower EL using
 * "reflect_pending_async_ea_to_lower_el" (KFH).
 */
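/*
 * The routing model follows SCR_EL3.EA for the outgoing world: when
 * SCR_EL3.EA == 1, External Aborts and SErrors are taken to EL3 and handled
 * there (FFH); when SCR_EL3.EA == 0, they are taken at the lower EL, so EL3
 * only reflects the pending error back to it (KFH).
 */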

/*
 * Refer to NOTE 1 : Firmware First Handling (FFH)
 *   Called when FFH is enabled and the outgoing world is Non-secure
 *   (SCR_EL3.EA == 1).
 *
 * This function assumes x30 has been saved.
 */
#if HANDLE_EA_EL3_FIRST_NS
func handle_pending_async_ea
	/*
	 * Prepare for nested handling of EA. Stash sysregs clobbered by the
	 * nested exception and handler.
	 */
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	mrs	x30, esr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	mrs	x30, spsr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	mrs	x30, elr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]

	mov	x30, #1
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	/*
	 * Restore the original x30 saved as part of entering EL3. This is not
	 * required for the current function but for the EL3 SError vector
	 * entry once the PSTATE.A bit is unmasked. We restore x30 and then
	 * the same value is stored in the EL3 SError vector entry.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * After clearing the PSTATE.A bit, the pending SError will trigger at
	 * the current EL. Issue an explicit synchronization event to ensure
	 * that the newly unmasked interrupt is taken immediately.
	 */
	unmask_async_ea
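	/*
	 * Note: unmask_async_ea is expected to expand to roughly
	 * "msr daifclr, #DAIF_ABT_BIT" followed by an "isb" (the
	 * synchronization event mentioned above); see the macro definition
	 * for the authoritative sequence.
	 */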

	/* Restore the original exception information along with zeroing the storage */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	msr	elr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	msr	spsr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	msr	esr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]

	/*
	 * If the original exception corresponds to an SError from a lower EL,
	 * eret back to the lower EL; otherwise return to the vector table for
	 * the original exception handling.
	 */
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_SERROR
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	b.eq	1f
	ret
1:
	exception_return
endfunc handle_pending_async_ea
#endif /* HANDLE_EA_EL3_FIRST_NS */

/*
 * Refer to NOTE 1 : Kernel First Handling (KFH)
 *   Called in the following scenarios:
 *     - Always, if the outgoing world is either Secure or Realm.
 *     - In KFH mode, if the outgoing world is Non-secure.
 *
 * This function assumes x30 has been saved.
 */

func reflect_pending_async_ea_to_lower_el
	/*
	 * As the original exception was not handled, we need to ensure that
	 * we return to the instruction which caused the exception. To achieve
	 * that, eret to "elr - 4" (label "subtract_elr_el3") for SMC, or
	 * simply eret otherwise (label "skip_smc_check").
	 *
	 * LIMITATION: It could be that the async EA is masked at the target
	 * exception level, or that the priority of the async EA with respect
	 * to the EL3/secure interrupt is lower, which causes back and forth
	 * between the lower EL and EL3. In that case, we can track the loop
	 * count in "CTX_NESTED_EA_FLAG" and leverage the previous ELR in
	 * "CTX_SAVED_ELR_EL3" to detect this cycle and panic to indicate a
	 * problem here (label "check_loop_ctr").
	 * However, setting SCR_EL3.IESB = 1 should give priority to SError
	 * handling, as per the AArch64.TakeException pseudocode in the
	 * Arm ARM.
	 *
	 * TODO: In future, if EL3 gains the capability to inject a virtual
	 * SError into lower ELs, we can remove the el3_panic, handle the
	 * original exception first, and inject an SError into the lower EL
	 * before ereting back.
	 */
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ldr	x29, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	mrs	x28, elr_el3
	cmp	x29, x28
	b.eq	check_loop_ctr
	str	x28, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	/* Zero the loop counter */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	b	skip_loop_ctr
check_loop_ctr:
	ldr	x29, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	add	x29, x29, #1
	str	x29, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	cmp	x29, #ASYNC_EA_REPLAY_COUNTER
	b.ge	el3_panic
skip_loop_ctr:
	/*
	 * Logic to distinguish whether we came from an SMC or any other
	 * exception. Use the offset within the vector table to determine
	 * which exception we are handling. Each group of vector entries spans
	 * 0x200 bytes: within a group, offsets "0x0-0x80" belong to the sync
	 * exception and "0x80-0x200" to the async exceptions. Use the vector
	 * base address (vbar_el3) and the exception address (LR) to check
	 * whether the offset we came from is any of the following:
	 * "0x0-0x80", "0x200-0x280", "0x400-0x480" or "0x600-0x680".
	 */
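	/*
	 * Worked example: an SError from a lower EL using AArch64 is vectored
	 * at vbar_el3 + 0x580, so LR points into 0x580-0x600. Masking with
	 * 0x1ff yields a value in 0x180-0x1ff, which is >= 0x80, so the
	 * exception is treated as async and the SMC check below is skipped.
	 */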
	mrs	x29, vbar_el3
	sub	x30, x30, x29
	and	x30, x30, #0x1ff
	cmp	x30, #0x80
	b.ge	skip_smc_check
	/* It is a synchronous exception. Now check whether it is an SMC. */
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_AARCH32_SMC
	b.eq	subtract_elr_el3
	cmp	x30, #EC_AARCH64_SMC
	b.eq	subtract_elr_el3
	b	skip_smc_check
subtract_elr_el3:
	sub	x28, x28, #4
skip_smc_check:
	msr	elr_el3, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	exception_return
endfunc reflect_pending_async_ea_to_lower_el

/*
 * Prelude for Synchronous External Abort handling. This function assumes
 * that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check for the Uncontainable error type. If so, route to the
	 * platform fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check the fault status code */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_sync_ea


/*
 * Prelude for Asynchronous External Abort handling. This function assumes
 * that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check the Exception Class to ensure this is an SError, as this
	 * function should only be invoked for SErrors. If that is not the
	 * case, which implies either a HW error or a programming error,
	 * panic.
	 */
	ubfx	x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x2, #EC_SERROR
	b.ne	el3_panic
	/*
	 * Check for an Implementation Defined Syndrome. If so, skip checking
	 * the Uncontainable error type from the syndrome, as the format is
	 * unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/* AET is only valid when DFSC is 0x11 (SError) */
	ubfx	x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x2, #DFSC_SERROR
	b.ne	1f

	/*
	 * Check for the Uncontainable error type. If so, route to the
	 * platform fatal error handler rather than the generic EA one.
	 */
	ubfx	x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x3, #ERROR_STATUS_UET_UC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea


/*
 * Delegate External Abort handling to the platform's EA handler. This
 * function assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If the ESR value saved in the context is non-zero, an EA was
	 * already being handled, and this is a double fault.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR, as handling might involve lower ELs, and returning to EL3
	 * from there would trample the original ESR.
	 */
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Set up the rest of the arguments, and call the platform External
	 * Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place)
	 * x2: Cookie (unused for now)
	 * x3: Context pointer
	 * x4: Flags (security state from SCR for now)
	 */
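	/*
	 * For reference, these arguments map onto a C handler with a
	 * signature along these lines (matching TF-A's platform porting
	 * interface; check include/plat/common/platform.h in your tree):
	 *
	 *   void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
	 *                        void *cookie, void *handle, uint64_t flags);
	 */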
	mov	x2, xzr
	mov	x3, sp
	ubfx	x4, x4, #0, #1

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning
	 * from the platform error handler, validate that we've completely
	 * unwound the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Restore EL3 state and ESR */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore ESR_EL3 and SCR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

#if ENABLE_ASSERTIONS
	cmp	x4, xzr
	ASM_ASSERT(ne)
#endif

	/* Clear ESR storage */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]

	ret	x29
endfunc ea_proceed
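
/*
 * For illustration only: a minimal sketch of what a platform's EA handler
 * might look like at the C level, assuming the plat_ea_handler prototype
 * noted above ea_proceed. A real implementation would typically decode the
 * syndrome and attempt recovery; this sketch simply reports and panics, in
 * the spirit of TF-A's default handler.
 *
 *	#include <arch_helpers.h>
 *	#include <bl31/ea_handle.h>
 *	#include <common/debug.h>
 *
 *	void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
 *			     void *cookie, void *handle, uint64_t flags)
 *	{
 *		// ea_reason is ERROR_EA_SYNC or ERROR_EA_ASYNC (ea_handle.h)
 *		ERROR("External Abort (%s) on 0x%lx, syndrome = 0x%lx\n",
 *		      (ea_reason == ERROR_EA_SYNC) ? "sync" : "async",
 *		      read_mpidr_el1(), syndrome);
 *		panic();
 *	}
 */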