xref: /rk3399_ARM-atf/bl31/aarch64/ea_delegate.S (revision ee6ff1bb37e67df2e080cf49a03dfc8310dbc733)
1/*
2 * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7
8#include <asm_macros.S>
9#include <assert_macros.S>
10#include <context.h>
11#include <ea_handle.h>
12
13
14	.globl	handle_lower_el_ea_esb
15	.globl	enter_lower_el_sync_ea
16	.globl	enter_lower_el_async_ea
17
18
19/*
20 * Function to delegate External Aborts synchronized by ESB instruction at EL3
21 * vector entry. This function assumes GP registers x0-x29 have been saved, and
22 * are available for use. It delegates the handling of the EA to platform
23 * handler, and returns only upon successfully handling the EA; otherwise
24 * panics. On return from this function, the original exception handler is
25 * expected to resume.
26 */
27func handle_lower_el_ea_esb
28	mov	x0, #ERROR_EA_ESB
29	mrs	x1, DISR_EL1
30	b	ea_proceed
31endfunc handle_lower_el_ea_esb
32
33
34/*
35 * This function forms the tail end of Synchronous Exception entry from lower
36 * EL, and expects to handle only Synchronous External Aborts from lower EL. If
37 * any other kind of exception is detected, then this function reports unhandled
38 * exception.
39 *
40 * Since it's part of exception vector, this function doesn't expect any GP
41 * registers to have been saved. It delegates the handling of the EA to platform
42 * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
43 */
44func enter_lower_el_sync_ea
45	/*
46	 * Explicitly save x30 so as to free up a register and to enable
47	 * branching.
48	 */
49	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
50
51	mrs	x30, esr_el3
52	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
53
54	/* Check for I/D aborts from lower EL */
55	cmp	x30, #EC_IABORT_LOWER_EL
56	b.eq	1f
57
58	cmp	x30, #EC_DABORT_LOWER_EL
59	b.ne	2f
60
611:
62	/* Test for EA bit in the instruction syndrome */
63	mrs	x30, esr_el3
64	tbz	x30, #ESR_ISS_EABORT_EA_BIT, 2f
65
66	/* Save GP registers */
67	bl	save_gp_registers
68
69	/* Setup exception class and syndrome arguments for platform handler */
70	mov	x0, #ERROR_EA_SYNC
71	mrs	x1, esr_el3
72	adr	x30, el3_exit
73	b	ea_proceed
74
752:
76	/* Synchronous exceptions other than the above are assumed to be EA */
77	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
78	no_ret	report_unhandled_exception
79endfunc enter_lower_el_sync_ea
80
81
82/*
83 * This function handles SErrors from lower ELs.
84 *
85 * Since it's part of exception vector, this function doesn't expect any GP
86 * registers to have been saved. It delegates the handling of the EA to platform
87 * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
88 */
89func enter_lower_el_async_ea
90	/*
91	 * Explicitly save x30 so as to free up a register and to enable
92	 * branching
93	 */
94	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
95
96	/* Save GP registers */
97	bl	save_gp_registers
98
99	/* Setup exception class and syndrome arguments for platform handler */
100	mov	x0, #ERROR_EA_ASYNC
101	mrs	x1, esr_el3
102	adr	x30, el3_exit
103	b	ea_proceed
104endfunc enter_lower_el_async_ea
105
106
107/*
108 * Delegate External Abort handling to platform's EA handler. This function
109 * assumes that all GP registers have been saved by the caller.
110 *
111 * x0: EA reason
112 * x1: EA syndrome
113 */
114func ea_proceed
115	/* Save EL3 state */
116	mrs	x2, spsr_el3
117	mrs	x3, elr_el3
118	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
119
120	/*
121	 * Save ESR as handling might involve lower ELs, and returning back to
122	 * EL3 from there would trample the original ESR.
123	 */
124	mrs	x4, scr_el3
125	mrs	x5, esr_el3
126	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
127
128	/*
129	 * Setup rest of arguments, and call platform External Abort handler.
130	 *
131	 * x0: EA reason (already in place)
132	 * x1: Exception syndrome (already in place).
133	 * x2: Cookie (unused for now).
134	 * x3: Context pointer.
135	 * x4: Flags (security state from SCR for now).
136	 */
137	mov	x2, xzr
138	mov	x3, sp
139	ubfx	x4, x4, #0, #1
140
141	/* Switch to runtime stack */
142	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
143	msr	spsel, #0
144	mov	sp, x5
145
146	mov	x29, x30
147#if ENABLE_ASSERTIONS
148	/* Stash the stack pointer */
149	mov	x28, sp
150#endif
151	bl	plat_ea_handler
152	mov	x30, x29
153
154#if ENABLE_ASSERTIONS
155	/*
156	 * Error handling flows might involve long jumps; so upon returning from
157	 * the platform error handler, validate that the we've completely
158	 * unwound the stack.
159	 */
160	mov	x27, sp
161	cmp	x28, x27
162	ASM_ASSERT(eq)
163#endif
164
165	/* Make SP point to context */
166	msr	spsel, #1
167
168	/* Restore EL3 state */
169	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
170	msr	spsr_el3, x1
171	msr	elr_el3, x2
172
173	/* Restore ESR_EL3 and SCR_EL3 */
174	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
175	msr	scr_el3, x3
176	msr	esr_el3, x4
177
178	ret
179endfunc ea_proceed
180