xref: /rk3399_ARM-atf/bl31/aarch64/ea_delegate.S (revision c72200357aed49fd51dc21e45d4396f5402df811)
/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <assert_macros.S>
#include <cpu_macros.S>

#include <bl31/ea_handle.h>
#include <context.h>
#include <lib/extensions/ras_arch.h>

	.globl	handle_lower_el_sync_ea
	.globl	handle_lower_el_async_ea
	.globl	handle_pending_async_ea

/*
 * This function handles Synchronous External Aborts from a lower EL.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_sync_ea
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/* EAs other than the above are unhandled exceptions */
	no_ret	report_unhandled_exception
1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_sync_ea


/*
 * This function handles SErrors from lower ELs.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_async_ea

	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_async_ea

/*
 * Handler for an async EA from a lower EL, synchronized at EL3 entry in FFH mode.
 *
 * This scenario may arise when there is an error (EA) in the system which has
 * not yet been signaled to the PE while executing in a lower EL. During entry
 * into EL3, the errors are synchronized either implicitly or explicitly,
 * causing an async EA to pend at EL3.
 *
 * On detecting the pending EA (via ISR_EL1.A), if the EA routing model is
 * Firmware First Handling (FFH, SCR_EL3.EA = 1), this handler first handles the
 * pending EA and then handles the original exception.
 *
 * This function assumes x30 has been saved.
 */
func handle_pending_async_ea
	/*
	 * Prepare for nested handling of the EA. Stash the sysregs that will be
	 * clobbered by the nested exception and its handler.
	 */
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	mrs	x30, esr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	mrs	x30, spsr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	mrs	x30, elr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]

	mov	x30, #1
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
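	/*
	 * Note: CTX_NESTED_EA_FLAG set above is assumed to be consumed by the
	 * EL3 exception vector code (outside this file) to recognise that a
	 * nested EA is in progress and to use the context stashed above.
	 */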
	/*
	 * Restore the original x30 saved as part of entering EL3. This is not
	 * required by the current function, but by the EL3 SError vector entry
	 * once the PSTATE.A bit is unmasked: we restore x30 here so that the
	 * SError vector entry saves the same (original) value.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * After clearing the PSTATE.A bit, the pending SError will be taken at
	 * the current EL. Use an explicit synchronization event to ensure the
	 * newly unmasked interrupt is taken immediately.
	 */
	unmask_async_ea
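	/*
	 * By this point the pending SError is expected to have been taken at
	 * the EL3 SError vector, handled firmware-first, and control returned
	 * here (hence the stash above and the restore of ELR/SPSR/ESR below).
	 */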

	/* Restore the original exception information and zero the stash slots */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	msr	elr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	msr	spsr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	msr	esr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]

	/*
	 * If the original exception corresponds to an SError from a lower EL,
	 * eret back to the lower EL; otherwise return to the vector table for
	 * the original exception handling.
	 */
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_SERROR
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	b.eq	1f
	ret
1:
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	str	xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	exception_return
endfunc handle_pending_async_ea

/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check fault status code */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

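	/*
	 * Tail call: ea_proceed returns directly to this function's caller via
	 * the unmodified link register.
	 */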
	b	ea_proceed
endfunc delegate_sync_ea


/*
 * Prelude for Asynchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check the Exception Class to ensure this is an SError, as this
	 * function should only be invoked for SErrors. If that is not the
	 * case, which implies either a HW error or a programming error, panic.
	 */
	ubfx	x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x2, #EC_SERROR
	b.ne	el3_panic
	/*
	 * Check for Implementation Defined Syndrome. If so, skip checking
	 * Uncontainable error type from the syndrome as the format is unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/* AET only valid when DFSC is 0x11 */
	ubfx	x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x2, #DFSC_SERROR
	b.ne	1f

	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x3, #ERROR_STATUS_UET_UC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea


/*
 * Delegate External Abort handling to the platform's EA handler. This function
 * assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If this is a double fault, invoke the platform handler. A double
	 * fault arises when the platform is already handling a fault from a
	 * lower EL in plat_ea_handler() and another fault occurs, which traps
	 * into EL3 because FFH_SUPPORT is enabled for the platform.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_DOUBLE_FAULT_ESR]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state, as handling might involve lower ELs */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	mrs	x4, scr_el3
	str	x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Save ESR_EL3 in CTX_DOUBLE_FAULT_ESR so that, if another fault
	 * happens in a lower EL, the next invocation of ea_proceed() catches
	 * it as a double fault, while also preserving the original ESR_EL3.
	 */
	mrs	x5, esr_el3
	str	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_DOUBLE_FAULT_ESR]

	/*
	 * Setup rest of arguments, and call platform External Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place).
	 * x2: Cookie (unused for now).
	 * x3: Context pointer.
	 * x4: Flags (security state from SCR for now).
	 */
	mov	x2, xzr
	mov	x3, sp
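	/* Bit 0 of SCR_EL3 (read into x4 above) is the NS bit, i.e. the security state */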
	ubfx	x4, x4, #0, #1

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

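	/*
	 * Preserve the return address in x29 across the call below, since "bl"
	 * will clobber x30; this function returns via "ret x29" at the end.
	 */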
	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps, so upon returning from
	 * the platform error handler, validate that we've completely unwound
	 * the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Clear Double Fault storage */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_DOUBLE_FAULT_ESR]

	/* Restore EL3 state */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2
	ldr	x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3

	ret	x29
endfunc ea_proceed