/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save/restore the EL2 system register context.
 * The el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The remaining functions save and
 * restore the EL2 system registers that are present
 * only when a particular feature is enabled. All
 * functions assume that 'x0' points to an
 * 'el2_sys_regs' structure where the register context
 * will be saved/restored.
 *
 * The following registers are not covered:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
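
/*
 * Illustrative call-site sketch (hypothetical: the base register and
 * the caller are assumptions; CTX_EL2_SYSREGS_OFFSET is taken to be
 * the usual TF-A context layout from context.h):
 *
 *	add	x0, x20, #CTX_EL2_SYSREGS_OFFSET	// x20 = context base
 *	bl	el2_sysregs_context_save_common
 */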
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */
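	/*
	 * Note: when ERRATA_SPECULATIVE_AT is enabled, sctlr_el1 and
	 * tcr_el1 are saved by save_and_update_ptw_el1_sys_regs()
	 * further down in this file instead.
	 */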

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */
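	/*
	 * Note: when ERRATA_SPECULATIVE_AT is enabled, sctlr_el1 and
	 * tcr_el1 are restored on the exit path instead, by the
	 * restore_ptw_el1_sys_regs macro invoked from el3_exit.
	 */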

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
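
/*
 * Hypothetical sketch of clearing the trap before touching the VFP
 * registers, should the assumption above ever change (TFP_BIT as
 * defined in arch.h):
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 */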
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs     x8, scr_el3
	orr     x8, x8, #SCR_EA_BIT
	msr     scr_el3, x8
	.endm
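	/* Invoked below from prepare_el3_entry, on every entry to EL3 */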

	/*
	 * Set to a default value the PSTATE bits that were not set by
	 * the exception entry itself, as described in the
	 * AArch64.TakeException() pseudocode function in Arm DDI 0487F.c,
	 * page J1-7635.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
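	/*
	 * ENABLE_FEAT_DIT == 2 selects runtime detection: read the DIT
	 * field of ID_AA64PFR0_EL1 and skip setting the bit when the
	 * feature is not implemented.
	 */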
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif /* ENABLE_FEAT_DIT == 2 */
	mov     x8, #DIT_BIT
	msr     DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers. It also saves PMCR_EL0 and
 * sets PMCR_EL0.DP, so that the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled whenever event counting is prohibited (ARMv8.5-PMU)
 * and hence need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry
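
/*
 * Illustrative (hypothetical) sketch of how an EL3 exception vector
 * uses this function; the real vector entries live in
 * runtime_exceptions.S:
 *
 *	bl	prepare_el3_entry
 *	...			// dispatch to the SMC/interrupt handler
 *	b	el3_exit
 */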

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
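	/* Restore SP_EL0 via x28 before x28/x29 themselves are reloaded */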
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and then update them to disable the stage 1 and stage 2
 * page table walk for the lower ELs.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk; the second
	 * ensures that the page table walker uses the TCR_EL1.EPDx
	 * bits for address translation. The ISB forces the CPU to
	 * perform these two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
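
/*
 * The saved sctlr_el1/tcr_el1 values are restored on the exit path by
 * the restore_ptw_el1_sys_regs macro (from el3_common_macros.S,
 * included above), invoked from el3_exit below.
 */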

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
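	/*
	 * zcr_el3 is written via its generic system register encoding
	 * (S3_6_C1_C2_0) so that this file also assembles with older
	 * toolchains that lack SVE support.
	 */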
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

/*
 * This is a hot path, so we do not want to perform actual FEAT_RAS
 * runtime detection here. The "esb" is a cheaper variant, so using
 * "dsb" in the ENABLE_FEAT_RAS == 2 case is not ideal, but won't hurt.
 */
#if IMAGE_BL31 && ENABLE_FEAT_RAS == 1
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && ENABLE_FEAT_RAS == 1 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
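	/* Record that this CPU is no longer executing in EL3 */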
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit