/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save/restore the EL2 system register context.
 * The el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The remaining functions save and
 * restore EL2 system registers that are present only
 * when a particular feature is enabled. All functions
 * assume that 'x0' points to an 'el2_sys_regs'
 * structure where the register context will be
 * saved/restored.
 *
 * The following registers are not saved/restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
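/* -----------------------------------------------------
 * Illustrative usage (a sketch, not part of the build):
 * a caller loads x0 with the address of the EL2 sysreg
 * area inside the CPU context and branches here, e.g.:
 *
 *	add	x0, x9, #CTX_EL2_SYSREGS_OFFSET
 *	bl	el2_sysregs_context_save_common
 *
 * Here x9 standing for the base of the saved-off CPU
 * context, and CTX_EL2_SYSREGS_OFFSET naming the offset
 * of the EL2 sysreg area within it, are assumptions for
 * illustration only; the only contract these functions
 * rely on is x0 pointing at the 'el2_sys_regs' area.
 * -----------------------------------------------------
 */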
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

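/* ------------------------------------------------------------------
 * Illustrative pairing (a sketch, not part of the build): the save
 * above and the restore below are symmetric and operate on the same
 * 'el1_sys_regs' layout, e.g. around a world switch:
 *
 *	add	x0, x9, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_save
 *	...	(switch security state, pick the incoming context)
 *	add	x0, x10, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_restore
 *
 * x9/x10 standing for the outgoing/incoming context bases are
 * assumptions for illustration only.
 * ------------------------------------------------------------------
 */
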
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64, using x9-x17
 * (temporary caller-saved registers according to the AArch64 PCS)
 * to save the floating point register context. It assumes that 'x0'
 * points to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP
 * registers nor sets this trap, so the bit is assumed to be
 * cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64, using x9-x17
 * (temporary caller-saved registers according to the AArch64 PCS)
 * to restore the floating point register context. It assumes that
 * 'x0' points to a 'fp_regs' structure from which the register
 * context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP
 * registers nor sets this trap, so the bit is assumed to be
 * cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set the SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set, to a default value, the PSTATE bits that are not set
	 * when an exception is taken, as described in the
	 * AArch64.TakeException() pseudocode function in Arm DDI
	 * 0487F.c, page J1-7635.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

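	/*
	 * Note on the ENABLE_FEAT_DIT == 2 path above (a sketch of
	 * the intent, not normative): the ID register check reduces
	 * to
	 *
	 *	mrs	x8, id_aa64pfr0_el1
	 *	tst	x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	 *
	 * i.e. the DIT bit is only written when id_aa64pfr0_el1
	 * advertises the feature, so the same binary can run on
	 * cores with and without FEAT_DIT.
	 */
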
/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * does not need to be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set: Secure Cycle Counter Disable; prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If these bits are not set, FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 must be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 and
	 * in Secure state. Hence, PMCR_EL0 must be saved before a
	 * world switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

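/* -----------------------------------------------------------------
 * Equivalent condition for the PMCR_EL0 handling above (a sketch in
 * comment form only): PMCR_EL0 is written to the context iff
 *
 *	((MDCR_EL3 & (SCCD | MCCD)) == 0) && (SCR_EL3.NS == 1)
 *
 * and, whenever the MDCR_EL3 check passes, PMCR_EL0.DP is then set
 * so the cycle counter does not count while executing in EL3 or
 * Secure state.
 * -----------------------------------------------------------------
 */
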
/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets to a default value any of the PSTATE bits that are
 * not set by hardware according to the AArch64.TakeException
 * pseudocode in the Arm Architecture Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

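/* -----------------------------------------------------------------
 * Illustrative usage (a sketch, not part of the build): an EL3
 * vector entry would typically save the incoming context first,
 * e.g.:
 *
 *	bl	prepare_el3_entry
 *
 * before branching to the actual SMC/interrupt handler. The exact
 * in-tree call sites live in the runtime exception vectors and are
 * omitted here.
 * -----------------------------------------------------------------
 */
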
/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed; if so, FEAT_PMUv3p5/7 is not implemented and
	 * PMCR_EL0 must be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk; the second
	 * forces the page table walker to use the TCR_EL1.EPDx bits
	 * when performing address translation. The ISB ensures that
	 * the CPU performs these two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable the page table walk
	 *    at stage 1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

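/*
 * The inverse is performed by the 'restore_ptw_el1_sys_regs' macro
 * invoked from el3_exit below. Conceptually (a sketch only; the
 * exact register choice and sequence are assumptions) it reloads
 * the values saved here:
 *
 *	ldp	x28, x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
 *	msr	sctlr_el1, x28
 *	msr	tcr_el1, x29
 */
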
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
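/* ------------------------------------------------------------------
 * Illustrative usage (a sketch, not part of the build): runtime
 * handlers finish an SMC by setting up the return context in SP_EL3
 * and tail-calling this routine:
 *
 *	b	el3_exit
 *
 * el3_exit does not return to its caller; it ERETs to a lower EL.
 * ------------------------------------------------------------------
 */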
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

/*
 * This is a hot path, so we don't want to do actual FEAT_RAS runtime
 * detection here. The "esb" is the cheaper variant, so using "dsb" in
 * the ENABLE_FEAT_RAS == 2 case is not ideal, but won't hurt.
 */
#if IMAGE_BL31 && ENABLE_FEAT_RAS == 1
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && ENABLE_FEAT_RAS == 1 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit