xref: /rk3399_ARM-atf/lib/el3_runtime/aarch64/context.S (revision 30788a8455779b70aebd38d53afc8aa19d776c6c)
/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global save_and_update_ptw_el1_sys_regs
	.global	el3_exit


/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to a
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */
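	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1 are
	 * not saved here: they are expected to be captured separately by
	 * save_and_update_ptw_el1_sys_regs() (defined later in this file)
	 * and written back via the restore_ptw_el1_sys_regs invocation in
	 * el3_exit.
	 */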

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

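	/*
	 * With ENABLE_FEAT_MTE == 2 the presence of FEAT_MTE is detected
	 * dynamically below: the MTE field of ID_AA64PFR1_EL1 is read and
	 * the save is skipped when the PE does not implement the
	 * extension. The same detection pattern recurs in this file for
	 * FEAT_DIT and FEAT_MPAM.
	 */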
	/* Save MTE system registers if the build has instructed so */
#if ENABLE_FEAT_MTE
#if ENABLE_FEAT_MTE == 2
	mrs	x8, id_aa64pfr1_el1
	and	x8, x8, #(ID_AA64PFR1_EL1_MTE_MASK << ID_AA64PFR1_EL1_MTE_SHIFT)
	cbz	x8, no_mte_save
#endif
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]

no_mte_save:
#endif /* ENABLE_FEAT_MTE */

	ret
endfunc el1_sysregs_context_save
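
/*
 * Illustrative usage sketch (an assumption, not part of this file): the
 * context management code is expected to call the save/restore pair with
 * 'x0' pointing at the saved-EL1-sysregs area of the target CPU context,
 * e.g. from C:
 *
 *	el1_sysregs_context_save(get_el1_sysregs_ctx(ctx));
 *	...
 *	el1_sysregs_context_restore(get_el1_sysregs_ctx(ctx));
 *
 * where 'get_el1_sysregs_ctx' stands in for whatever helper resolves the
 * CTX_EL1_SYSREGS_OFFSET region of a 'cpu_context' structure.
 */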

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to a
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if ENABLE_FEAT_MTE
#if ENABLE_FEAT_MTE == 2
	mrs	x8, id_aa64pfr1_el1
	and	x8, x8, #(ID_AA64PFR1_EL1_MTE_MASK << ID_AA64PFR1_EL1_MTE_SHIFT)
	cbz	x8, no_mte_restore
#endif

	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14

no_mte_restore:
#endif /* ENABLE_FEAT_MTE */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
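
/*
 * Minimal sketch (an assumption, not current firmware behaviour) of how a
 * caller could make sure CPTR_EL3.TFP is clear before using the FP
 * save/restore helpers above, should EL3 ever start trapping FP/SIMD
 * accesses:
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 */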

	/*
	 * Set the SCR_EL3.EA bit so that SErrors are taken to EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state, performs an ID register
 * check to see if the platform supports the MPAM extension, and restores
 * the MPAM3_EL3 register value if it is FEAT_STATE_ENABLED or
 * FEAT_STATE_CHECKED.
 *
 * This is more involved than for other extensions because platform
 * support for MPAM cannot be determined from the status of a particular
 * bit in the MDCR_EL3 or CPTR_EL3 register; the ID registers have to be
 * consulted instead.
 * ------------------------------------------------------------------------
 */
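/*
 * For reference: MPAM presence is advertised in two halves, the major
 * version in ID_AA64PFR0_EL1.MPAM and the fractional version in
 * ID_AA64PFR1_EL1.MPAM_frac. The macro below treats the extension as
 * implemented if either field is non-zero.
 */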

	.macro	restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2

	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore MPAM3_EL3 register as per context state
	 * Currently we only enable MPAM for NS world and trap to EL3
	 * for MPAM access in lower ELs of Secure and Realm world
	 * x9 holds address of the per_world context
	 * -----------------------------------------------------------
	 */

	ldr	x17, [x9, #CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif /* ENABLE_FEAT_MPAM */
	.endm /* restore_mpam3_el3 */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also saves PMCR_EL0 and disables the Secure Cycle Counter
 * (PMCCNTR_EL0) in EL3/Secure (ARMv8.5-PMU), so that PMCCNTR_EL0
 * does not need to be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry
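
/*
 * Usage note (an assumption, based on the comment above): prepare_el3_entry
 * is intended to be invoked first thing on entry to EL3, presumably from
 * the exception vectors, while SP_EL3 still points at the CPU context and
 * before any of the registers it saves are clobbered, e.g.
 * 'bl prepare_el3_entry'.
 */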

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
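
/*
 * Note that restore_gp_pmcr_pauth_regs deliberately leaves x30 alone:
 * el3_exit below reloads x30 from CTX_GPREG_LR itself after the 'bl'
 * returns, since the call would otherwise clobber the restored value.
 */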

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walk.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk; the second makes
	 * the page table walker use the TCR_EL1.EPDx bits to perform
	 * address translation. The ISB ensures that the CPU performs
	 * these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
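
/*
 * The inverse operation is performed by the restore_ptw_el1_sys_regs
 * macro invoked from el3_exit below, which is presumed to come from one
 * of the headers included at the top of this file and to write
 * SCTLR_EL1/TCR_EL1 back from the same context slots.
 */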

/* -----------------------------------------------------------------
 * The macro below returns the address of the per_world context for
 * the security state, retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm
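
/*
 * In effect the macro computes, in C-style pseudocode (illustrative
 * only):
 *
 *	reg = (uintptr_t)per_world_context +
 *		security_state * (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3);
 *
 * i.e. per_world_context is treated as an array indexed by security
 * state, with one fixed-size per-world entry per element.
 */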

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */
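
	/*
	 * synchronize_errors above is expected to ensure that any
	 * outstanding asynchronous External Aborts raised while in EL3
	 * are consumed or recorded before the ERET below, so that they
	 * are not spuriously taken after returning to the lower EL.
	 */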

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit