xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision d04c04a4e8d968f9f82de810a3c763474e3faeb7)
/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save
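
/*
 * Note: el1_sysregs_context_restore below is the exact mirror of the
 * save routine above; both operate on the same 'el1_sys_regs' layout
 * (the CTX_* offsets from context.h), so any register added to one
 * routine must be added to the other at the matching offset.
 */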

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
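
/*
 * Typical C-side usage during a world switch (a hedged sketch, not a
 * verbatim quote of the context management code; get_el1_sysregs_ctx()
 * is the accessor from context.h and each 'ctx_*' a cpu_context_t
 * pointer):
 *
 *	el1_sysregs_context_save(get_el1_sysregs_ctx(ctx_of_world_left));
 *	el1_sysregs_context_restore(get_el1_sysregs_ctx(ctx_of_world_entered));
 */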

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using x9-x17 (temporary caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using x9-x17 (temporary caller-saved registers) to restore the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET that
	 * switches to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
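
/*
 * Hedged usage sketch (illustrative, not a quote of the context
 * management code): with CTX_INCLUDE_FPREGS=1 the FP/SIMD context is
 * typically handled around a world switch through the get_fpregs_ctx()
 * accessor from context.h:
 *
 *	#if CTX_INCLUDE_FPREGS
 *		fpregs_context_save(get_fpregs_ctx(ctx_of_world_left));
 *		fpregs_context_restore(get_fpregs_ctx(ctx_of_world_entered));
 *	#endif
 */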

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs     x8, scr_el3
	orr     x8, x8, #SCR_EA_BIT
	msr     scr_el3, x8
	.endm
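
	/*
	 * With SCR_EL3.EA set, External aborts and SError exceptions are
	 * routed to EL3 regardless of the Exception level they are taken
	 * from, so EL3 observes them directly rather than relying on the
	 * lower ELs to handle them.
	 */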

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov     x8, #DIT_BIT
	msr     DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */
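
	/*
	 * ENABLE_FEAT_DIT follows the usual TF-A feature-state convention:
	 * 1 means the feature is assumed present (FEAT_STATE_ENABLED),
	 * while 2 means it is detected at runtime (FEAT_STATE_CHECKED).
	 * That is why the macro above probes ID_AA64PFR0_EL1.DIT before
	 * writing PSTATE.DIT in the "== 2" case.
	 */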

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state, performs an ID register
 * check to see if the platform supports the MPAM extension, and restores
 * the MPAM3_EL3 register value if the state is FEAT_STATE_ENABLED or
 * FEAT_STATE_CHECKED.
 *
 * This is more involved than for other extensions because we cannot tell
 * whether the platform supports MPAM by looking at the status of a
 * particular bit in the MDCR_EL3 or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */

	.macro	restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2

	mrs x8, id_aa64pfr0_el1
	lsr x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs x7, id_aa64pfr1_el1
	lsr x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr x7, x7, x8
	cbz x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore the MPAM3_EL3 register as per the context state.
	 * Currently MPAM is only enabled for the NS world; MPAM
	 * accesses from lower ELs of the Secure and Realm worlds
	 * trap to EL3.
	 * -----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif
	.endm /* restore_mpam3_el3 */
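
/*
 * Equivalent logic in C, as a hedged sketch (the sysreg accessors follow
 * TF-A's usual read_<reg>()/write_<reg>() naming and are illustrative
 * rather than quoted from the sources; 'ctx_mpam3_el3_value' stands for
 * the value loaded from the context above):
 *
 *	uint64_t mpam = (read_id_aa64pfr0_el1() >> ID_AA64PFR0_MPAM_SHIFT) &
 *			ID_AA64PFR0_MPAM_MASK;
 *	uint64_t frac = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_MPAM_FRAC_SHIFT) &
 *			ID_AA64PFR1_MPAM_FRAC_MASK;
 *
 *	if ((mpam | frac) != 0U) {
 *		write_mpam3_el3(ctx_mpam3_el3_value);
 *	}
 */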

/* ------------------------------------------------------------------
 * The following macro saves all the general purpose and, if enabled,
 * ARMv8.3-PAuth registers; the matching restore is performed by
 * restore_gp_pmcr_pauth_regs below.
 * It also saves PMCR_EL0 and disables the cycle counter
 * (PMCCNTR_EL0) while event counting is prohibited in EL3/Secure
 * (ARMv8.5-PMU), so that PMCCNTR_EL0 need not be saved/restored
 * during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets PSTATE to a known state,
 * preparing for entry to EL3. It saves all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers, enables SError reporting at
 * EL3 via SCR_EL3.EA, and then sets to their default EL3 values any
 * PSTATE bits that are not set by hardware according to the
 * AArch64.TakeException pseudocode in the Arm Architecture Reference
 * Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits left unset by the AArch64.TakeException()
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry
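
/*
 * Note: prepare_el3_entry is intended to run very early on every entry
 * to EL3, typically invoked from the exception vector/entry macros
 * (e.g. in runtime_exceptions.S), before any code that might clobber
 * the incoming register state.
 */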

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 and
 * all general purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
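
/*
 * Typical usage (as in el3_exit below): restore everything else first,
 * then reload x30 from its saved slot, since this function itself
 * returns through x30 and therefore cannot restore it:
 *
 *	bl	restore_gp_pmcr_pauth_regs
 *	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 */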

/*
 * In the ERRATA_SPECULATIVE_AT case, save the SCTLR_EL1 and TCR_EL1
 * registers and then update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The two steps below must be performed in this order to
	 * disable page table walks for the lower ELs (EL1 and EL0).
	 * The first step disables stage 1 page table walks; the second
	 * step forces the page table walker to honour the TCR_EL1.EPDx
	 * bits when performing address translation. The ISB ensures
	 * that the CPU carries out these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable stage 1 page
	 *    table walks.
	 * 2. Set the MMU enable bit to avoid identity mapping via
	 *    stage 2 and force the TCR_EL1.EPDx bits to be used by
	 *    the page table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
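
/*
 * The SCTLR_EL1/TCR_EL1 values saved here are put back on the exit
 * path by the restore_ptw_el1_sys_regs macro invoked from el3_exit
 * below, so the page table walk changes stay transparent to the
 * lower ELs.
 */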

/* -----------------------------------------------------------------
 * The macro below returns the address of the per_world context for
 * the security state, retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_GLOBAL_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm
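
/*
 * Equivalent C view, as a hedged sketch: per_world_context is the
 * global array referenced by the adrp/add pair above, and each entry
 * spans (CTX_GLOBAL_EL3STATE_END - CTX_CPTR_EL3) bytes; the index
 * function is illustrative of what the get_security_state macro does:
 *
 *	index = security_state_of(scr_el3);
 *	addr  = (uintptr_t)per_world_context +
 *		index * (CTX_GLOBAL_EL3STATE_END - CTX_CPTR_EL3);
 */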

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the general purpose registers and other
 * special registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

/*
 * This is a hot path, so we do not want to do actual FEAT_RAS runtime
 * detection here. The "esb" would be the cheaper choice, so falling
 * back to "dsb" in the ENABLE_FEAT_RAS==2 case is not ideal, but does
 * no harm.
 */
#if IMAGE_BL31 && ENABLE_FEAT_RAS == 1
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && ENABLE_FEAT_RAS */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit
699