/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

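/* -----------------------------------------------------
 * Illustrative usage from the C context-management
 * layer (a sketch, assuming the usual TF-A helpers
 * cm_get_context() and get_el2_sysregs_ctx() are
 * available in this tree):
 *
 *   el2_sysregs_context_save(
 *       get_el2_sysregs_ctx(cm_get_context(NON_SECURE)));
 *
 * The CTX_*_EL2 offsets used below are defined in
 * context.h and must match the 'el2_sys_regs' layout.
 * -----------------------------------------------------
 */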
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x10, dbgvcr32_el2
	mrs	x11, elr_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

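	/* MPAM EL2 registers, saved only when EL3 enables MPAM for lower ELs */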
#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

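	/*
	 * Armv8.6 additions: the fine-grained trap registers
	 * (HFGRTR_EL2, HFGWTR_EL2, HFGITR_EL2, HDFGRTR_EL2, HDFGWTR_EL2,
	 * HAFGRTR_EL2) and the self-hosted counter offset CNTPOFF_EL2.
	 */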
#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

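	/*
	 * Registers this file gates on Armv8.4: the secure EL2 timers,
	 * TTBR1_EL2, the virtualization support registers (VDISR_EL2,
	 * VNCR_EL2, VSESR_EL2, VSTCR_EL2, VSTTBR_EL2) and TRFCR_EL2.
	 */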
#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
	/*
	 * Disable page table walks while the EL2 system registers are
	 * being restored: if HCR_EL2.E2H is clear, set the TCR_EL2.EPD0
	 * and EPD1 bits, then clear SCTLR_EL2.M.
	 */
	mrs	x9, hcr_el2
	tst	x9, #HCR_E2H_BIT
	bne	1f
	mrs	x9, tcr_el2
	orr	x9, x9, #TCR_EPD0_BIT
	orr	x9, x9, #TCR_EPD1_BIT
	msr	tcr_el2, x9
1:	mrs	x9, sctlr_el2
	bic	x9, x9, #SCTLR_M_BIT
	msr	sctlr_el2, x9
	isb
#endif

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	mdcr_el2, x15
	msr	PMSCR_EL2, x16

	ldp	x17, x9, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x17
	msr	sp_el2, x9

	ldp	x10, x11, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x10
	msr	ttbr0_el2, x11

	ldp	x12, x13, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x12
	msr	vmpidr_el2, x13

	ldp	x14, x15, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x14
	msr	vtcr_el2, x15

	ldr	x16, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x17, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x17
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x9, x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x9
	msr	MPAMHCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x11
	msr	MPAMVPM1_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x13
	msr	MPAMVPM3_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x15
	msr	MPAMVPM5_EL2, x16

	ldp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x17
	msr	MPAMVPM7_EL2, x9

	ldr	x10, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x10
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x11, x12, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x11
	msr	HDFGRTR_EL2, x12

	ldp	x13, x14, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x13
	msr	HFGITR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x15
	msr	HFGWTR_EL2, x16

	ldr	x17, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x9
	msr	cnthps_cval_el2, x10

	ldp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x11
	msr	cnthvs_ctl_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x13
	msr	cnthvs_tval_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x15
	msr	cnthv_cval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x17
	msr	contextidr_el2, x9

	ldr	x10, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x10

	ldr	x11, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x11

	ldr	x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x12

	ldr	x13, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x13

	ldr	x14, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x14

	ldr	x15, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x15

	ldr	x16, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x16

	ldr	x17, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x9, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x9
#endif

#if ERRATA_SPECULATIVE_AT
/*
 * Make sure all registers are restored successfully except
 * SCTLR_EL2 and TCR_EL2
 */
	isb
#endif

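	/*
	 * SCTLR_EL2 and TCR_EL2 are always restored last so that, when
	 * the speculative AT workaround is enabled, the MMU and page
	 * table walks are only re-enabled once the remaining EL2 system
	 * registers hold their final values.
	 */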
	ldr	x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x9
	ldr	x9, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
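	/*
	 * Disable page table walks while the EL1 system registers are
	 * being restored: set TCR_EL1.EPD0 and EPD1, and clear
	 * SCTLR_EL1.M (speculative AT errata workaround).
	 */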
	mrs	x9, tcr_el1
	orr	x9, x9, #TCR_EPD0_BIT
	orr	x9, x9, #TCR_EPD1_BIT
	msr	tcr_el1, x9
	mrs	x9, sctlr_el1
	bic	x9, x9, #SCTLR_M_BIT
	msr	sctlr_el1, x9
	isb
#endif

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldr	x16, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldr	x16, [x0, #CTX_TPIDR_EL1]
	msr	tpidr_el1, x16

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

#if ERRATA_SPECULATIVE_AT
/*
 * Make sure all registers are restored successfully except
 * SCTLR_EL1 and TCR_EL1
 */
	isb
#endif

	ldr	x9, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x9
	ldr	x9, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x9

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using x9-x17 (temporary caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' is pointing
 * to a 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, the Trusted Firmware does not currently use VFP registers
 * nor set this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
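/* ------------------------------------------------------------------
 * Note: q0-q31 below cover the whole Advanced SIMD/FP register file;
 * FPSR and FPCR carry the status and control state, and FPEXC32_EL2
 * is only relevant when AArch32 EL1 support is included in the build.
 * ------------------------------------------------------------------
 */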
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64),
 * using x9-x17 (temporary caller-saved registers) to restore the
 * floating point register context. It assumes that 'x0' is pointing
 * to a 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, the Trusted Firmware does not currently use VFP registers
 * nor set this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save all the general purpose
 * and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter is not disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; in that case it saves
 * PMCR_EL0 (only when called from Non-secure state) and then
 * disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
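/* ------------------------------------------------------------------
 * Note: the EL3 runtime exception handlers (e.g. the SMC handler in
 * runtime_exceptions.S) are expected to reach this routine via a
 * direct branch ("b el3_exit") once the return values have been
 * written into the saved GP registers of the context that SP_EL3
 * points to.
 * ------------------------------------------------------------------
 */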
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit