/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

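/* ---------------------------------------------------------------------------
 * These helpers are expected to be invoked from the EL3 exception vectors and
 * from the context management code: the el1/el2/fp routines take in 'x0' a
 * pointer to the relevant part of the per-CPU context, while the gp/pmcr/pauth
 * routines and el3_exit operate on the context that SP_EL3 points to.
 * ---------------------------------------------------------------------------
 */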
#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
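/* -----------------------------------------------------
 * Note on layout: each mrs/mrs/stp pair below stores two
 * 64-bit system registers into adjacent 8-byte slots of
 * the context area, i.e. an stp to CTX_<REG>_EL2 fills
 * that offset and the one 8 bytes above it. The offsets
 * are defined in context.h and are expected to be laid
 * out accordingly.
 * -----------------------------------------------------
 */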

func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x10, dbgvcr32_el2
	mrs	x11, elr_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, fpexc32_el2
	mrs	x17, hacr_el2
	stp	x16, x17, [x0, #CTX_FPEXC32_EL2]

	mrs	x9, hcr_el2
	mrs	x10, hpfar_el2
	stp	x9, x10, [x0, #CTX_HCR_EL2]

	mrs	x11, hstr_el2
	mrs	x12, ICC_SRE_EL2
	stp	x11, x12, [x0, #CTX_HSTR_EL2]

	mrs	x13, ICH_HCR_EL2
	mrs	x14, ICH_VMCR_EL2
	stp	x13, x14, [x0, #CTX_ICH_HCR_EL2]

	mrs	x15, mair_el2
	mrs	x16, mdcr_el2
	stp	x15, x16, [x0, #CTX_MAIR_EL2]

	mrs	x17, PMSCR_EL2
	mrs	x9, sctlr_el2
	stp	x17, x9, [x0, #CTX_PMSCR_EL2]

	mrs	x10, spsr_el2
	mrs	x11, sp_el2
	stp	x10, x11, [x0, #CTX_SPSR_EL2]

	mrs	x12, tcr_el2
	mrs	x13, tpidr_el2
	stp	x12, x13, [x0, #CTX_TCR_EL2]

	mrs	x14, ttbr0_el2
	mrs	x15, vbar_el2
	stp	x14, x15, [x0, #CTX_TTBR0_EL2]

	mrs	x16, vmpidr_el2
	mrs	x17, vpidr_el2
	stp	x16, x17, [x0, #CTX_VMPIDR_EL2]

	mrs	x9, vtcr_el2
	mrs	x10, vttbr_el2
	stp	x9, x10, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x11, TFSR_EL2
	str	x11, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif


#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save
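/* -----------------------------------------------------
 * A minimal usage sketch from the C side, assuming the
 * EL2 context accessors exposed by context.h (the names
 * below are illustrative, not a definitive interface):
 *
 *	el2_sysregs_t *el2_ctx = get_el2_sysregs_ctx(ctx);
 *	el2_sysregs_context_save(el2_ctx);
 *	...
 *	el2_sysregs_context_restore(el2_ctx);
 * -----------------------------------------------------
 */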

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_FPEXC32_EL2]
	msr	fpexc32_el2, x16
	msr	hacr_el2, x17

	ldp	x9, x10, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x9
	msr	hpfar_el2, x10

	ldp	x11, x12, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x11
	msr	ICC_SRE_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x13
	msr	ICH_VMCR_EL2, x14

	ldp	x15, x16, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x15
	msr	mdcr_el2, x16

	ldp	x17, x9, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x17
	msr	sctlr_el2, x9

	ldp	x10, x11, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x10
	msr	sp_el2, x11

	ldp	x12, x13, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x12
	msr	tpidr_el2, x13

	ldp	x14, x15, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x14
	msr	vbar_el2, x15

	ldp	x16, x17, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x16
	msr	vpidr_el2, x17

	ldp	x9, x10, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x9
	msr	vttbr_el2, x10

#if CTX_INCLUDE_MTE_REGS
	ldr	x11, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x11
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x9, x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x9
	msr	MPAMHCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x11
	msr	MPAMVPM1_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x13
	msr	MPAMVPM3_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x15
	msr	MPAMVPM5_EL2, x16

	ldp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x17
	msr	MPAMVPM7_EL2, x9

	ldr	x10, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x10
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x11, x12, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x11
	msr	HDFGRTR_EL2, x12

	ldp	x13, x14, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x13
	msr	HFGITR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x15
	msr	HFGWTR_EL2, x16

	ldr	x17, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x9
	msr	cnthps_cval_el2, x10

	ldp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x11
	msr	cnthvs_ctl_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x13
	msr	cnthvs_tval_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x15
	msr	cnthv_cval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x17
	msr	contextidr_el2, x9

	ldr	x10, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x10

	ldr	x11, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x11

	ldr	x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x12

	ldr	x13, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x13

	ldr	x14, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x14

	ldr	x15, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x15

	ldr	x16, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x16

	ldr	x17, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x9, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x9
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Accesses to the VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets that trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
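/* ------------------------------------------------------------------
 * Note: the Q registers are 128 bits wide, so each stp/ldp pair in
 * the functions below transfers 32 bytes; the CTX_FP_Q<n> offsets in
 * context.h are therefore expected to be spaced 32 bytes apart.
 * ------------------------------------------------------------------
 */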
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Accesses to the VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets that trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB is required here, as the ERET used to switch
	 * to secure EL1 or the non-secure world covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save all the general purpose
 * and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter has been disabled
 * via MDCR_EL3.SCCD (only possible when ARMv8.5-PMU is implemented);
 * if it has not, PMCR_EL0 is saved (only when the caller is in
 * Non-secure state) and the Cycle Counter is then disabled.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
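/* ------------------------------------------------------------------
 * Control flow of the PMCR_EL0 handling below, for reference: if
 * MDCR_EL3.SCCD is already set the whole block is skipped (branch to
 * label 1). Otherwise PMCR_EL0 is read; it is written to the context
 * only for a Non-secure caller (a Secure caller branches straight to
 * label 2), and in both cases PMCR_EL0.DP is then set to disable the
 * cycle counter while event counting is prohibited.
 * ------------------------------------------------------------------
 */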
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level.
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores the ARMv8.3-PAuth registers (if enabled),
 * PMCR_EL0 (when returning to Non-secure state) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state, but
	 * only if the Secure Cycle Counter is not disabled in
	 * MDCR_EL3 (which is the case when ARMv8.5-PMU is not
	 * implemented).
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
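	/*
	 * x28 is used as a temporary below: it first carries the saved
	 * SP_EL0 value into sp_el0 and is only then restored, together
	 * with x29, from the context.
	 */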
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17
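	/*
	 * x16-x18 may be used freely as temporaries in this function:
	 * the general purpose registers are only restored from the
	 * context later, by restore_gp_pmcr_pauth_regs, just before
	 * the exception return.
	 */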

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit