/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* !ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save
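
/* ------------------------------------------------------------------
 * Illustrative only: this function is reached via the context
 * management code with 'x0' pointing at the EL1 sysreg area of a
 * cpu_context. A minimal caller-side sketch, assuming the accessor
 * names declared in context.h (verify against your tree):
 *
 *	cpu_context_t *ctx = cm_get_context(security_state);
 *	el1_sysregs_context_save(get_el1_sysregs_ctx(ctx));
 *
 * ------------------------------------------------------------------
 */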

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* !ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
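
/* ------------------------------------------------------------------
 * Note: when ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
 * are deliberately excluded from the two functions above; they are
 * handled instead by save_and_update_ptw_el1_sys_regs below and by
 * the restore_ptw_el1_sys_regs macro invoked from el3_exit.
 * ------------------------------------------------------------------
 */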

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using x9-x17 (temporary caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we neither use VFP registers nor set traps in
 * Trusted Firmware, and assume the bit is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save
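
/* ------------------------------------------------------------------
 * Illustrative only: if a future caller could not rely on
 * CPTR_EL3.TFP being clear, it would have to remove the trap before
 * touching the Q registers. A minimal sketch, assuming the TFP_BIT
 * definition from arch.h:
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 *
 * ------------------------------------------------------------------
 */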

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using x9-x17 (temporary caller-saved registers) to restore the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we neither use VFP registers nor set traps in
 * Trusted Firmware, and assume the bit is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
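
/* ------------------------------------------------------------------
 * Illustrative only: like the EL1 sysreg pair, these functions are
 * expected to be driven from the context management code, e.g.
 * (accessor name per context.h, verify against your tree):
 *
 *	fpregs_context_save(get_fpregs_ctx(cm_get_context(state)));
 *
 * ------------------------------------------------------------------
 */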

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 * Clobbers: x8
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set to a default value the PSTATE bits that are not set when an
	 * exception is taken, as described in the AArch64.TakeException()
	 * pseudocode function in Arm DDI 0487F.c, page J1-7635.
	 * Clobbers: x8
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
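	/*
	 * A build value of 2 (FEAT_STATE_CHECKED in TF-A's feature
	 * convention) means DIT presence is only known at runtime, so
	 * probe ID_AA64PFR0_EL1.DIT and skip the write if the field
	 * reads as zero.
	 */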
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif /* ENABLE_FEAT_DIT == 2 */
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM build state, performs an ID
 * register check to see if the platform supports the MPAM extension,
 * and restores the MPAM3_EL3 register value when the feature state is
 * FEAT_STATE_ENABLED or FEAT_STATE_CHECKED.
 *
 * This is more complicated than for other extensions because MPAM
 * support cannot be inferred from the status of a particular bit in
 * the MDCR_EL3 or CPTR_EL3 register; the ID registers have to be
 * consulted instead.
 * ------------------------------------------------------------------------
 */

	.macro	restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2

	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif /* ENABLE_FEAT_MPAM == 2 */
	/* -----------------------------------------------------------
	 * Restore MPAM3_EL3 register as per context state
	 * Currently we only enable MPAM for NS world and trap to EL3
	 * for MPAM access in lower ELs of Secure and Realm world
	 * x9 holds address of the per_world context
	 * -----------------------------------------------------------
	 */

	ldr	x17, [x9, #CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */
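	/*
	 * Note: S3_6_C10_C5_0 is the generic system-register encoding
	 * of MPAM3_EL3, presumably used so the file still assembles
	 * with toolchains that do not know the register by name.
	 */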

no_mpam:
#endif /* ENABLE_FEAT_MPAM */
	.endm /* restore_mpam3_el3 */

/* ------------------------------------------------------------------
 * The following macro is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers.
 * It also saves PMCR_EL0 and sets PMCR_EL0.DP so that the cycle
 * counter (PMCCNTR_EL0) is disabled while event counting is
 * prohibited, i.e. in EL3/Secure (ARMv8.5-PMU); PMCCNTR_EL0
 * therefore need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry
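
/* ------------------------------------------------------------------
 * Illustrative only: prepare_el3_entry is expected to be invoked
 * near the top of the EL3 exception vectors (see
 * runtime_exceptions.S in this tree), roughly:
 *
 *	bl	prepare_el3_entry
 *
 * so the incoming world's registers are captured before handler
 * code can clobber them.
 * ------------------------------------------------------------------
 */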

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
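
/* ------------------------------------------------------------------
 * Illustrative only: the expected calling pattern is the one used by
 * el3_exit below - restore everything, then pull x30 out of the
 * context by hand before ERET:
 *
 *	bl	restore_gp_pmcr_pauth_regs
 *	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 *
 * ------------------------------------------------------------------
 */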

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk and the second
	 * step ensures that the page table walker uses the
	 * TCR_EL1.EPDx bits to perform address translation. The ISB
	 * ensures that the CPU performs the two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Set the MMU enable bit to avoid identity mapping via
	 *    stage 2 and force the TCR_EL1.EPDx bits to be used by
	 *    the page table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
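
/* ------------------------------------------------------------------
 * Note: the counterpart of this function is the
 * restore_ptw_el1_sys_regs macro invoked from el3_exit below, which
 * puts the saved SCTLR_EL1/TCR_EL1 values back on the exit path.
 * ------------------------------------------------------------------
 */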

/* -----------------------------------------------------------------
 * The below macro returns the address of the per_world context for
 * the security state, retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm
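
/* ------------------------------------------------------------------
 * Illustrative only: the computation above is the assembly
 * equivalent of indexing a C array of fixed-size per-world entries,
 * roughly (names as used in this file, entry size taken from the
 * context offsets):
 *
 *	entry_size = CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3;
 *	addr = &per_world_context + (security_state * entry_size);
 *
 * ------------------------------------------------------------------
 */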

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
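	/*
	 * Note: S3_6_C1_C2_0 is the generic system-register encoding
	 * of ZCR_EL3, presumably used so the file assembles even with
	 * toolchains that lack the SVE register names.
	 */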
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit