/*
 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
	mrs	x13, HDFGRTR_EL2
#if ENABLE_FEAT_AMUv1
	mrs	x14, HAFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
#else
	str	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
#endif /* RAS_EXTENSION */

#if ENABLE_FEAT_SEL2
	/*
	 * VSTCR_EL2 and VSTTBR_EL2 registers are saved only
	 * when FEAT_SEL2 is supported.
	 */
	mrs	x13, vstcr_el2
	mrs	x14, vsttbr_el2
	stp	x13, x14, [x0, #CTX_VSTCR_EL2]
#endif /* ENABLE_FEAT_SEL2 */

#if CTX_INCLUDE_AARCH32_REGS && ENABLE_FEAT_SEL2
	/*
	 * SDER32_EL2 register is saved only when EL2 and EL1 are
	 * capable of using AArch32 and FEAT_SEL2 is supported.
	 */
	mrs	x15, sder32_el2
	str	x15, [x0, #CTX_SDER32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS && ENABLE_FEAT_SEL2 */

#if CTX_INCLUDE_NEVE_REGS
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
	/*
	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
	 */
	mrs	x12, TRFCR_EL2
	str	x12, [x0, #CTX_TRFCR_EL2]
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
	mrs	x14, hcrx_el2
	str	x14, [x0, #CTX_HCRX_EL2]
#endif /* ENABLE_FEAT_HCX */

	ret
endfunc el2_sysregs_context_save
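
/* -----------------------------------------------------
 * Illustrative usage (an assumption about callers, not
 * part of this file): C code in the context-management
 * layer is expected to pass the EL2 sysreg area of a CPU
 * context, e.g. using the accessors declared in
 * context.h:
 *
 *	cpu_context_t *ctx = cm_get_context(NON_SECURE);
 *	el2_sysregs_context_save(get_el2_sysregs_ctx(ctx));
 * -----------------------------------------------------
 */
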
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
#if ENABLE_FEAT_AMUv1
	ldp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
	msr	HAFGRTR_EL2, x14
#else
	ldr	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	msr	HDFGRTR_EL2, x13

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
#endif /* RAS_EXTENSION */

#if ENABLE_FEAT_SEL2
	/*
	 * VSTCR_EL2 and VSTTBR_EL2 registers are restored only when FEAT_SEL2
	 * is supported.
	 */
	ldp	x13, x14, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x13
	msr	vsttbr_el2, x14
#endif /* ENABLE_FEAT_SEL2 */

#if CTX_INCLUDE_AARCH32_REGS && ENABLE_FEAT_SEL2
	/*
	 * SDER32_EL2 register is restored only when EL2 and EL1 are capable
	 * of using AArch32 and FEAT_SEL2 is supported.
	 */
	ldr	x15, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x15
#endif /* CTX_INCLUDE_AARCH32_REGS && ENABLE_FEAT_SEL2 */

#if CTX_INCLUDE_NEVE_REGS
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
#endif /* ENABLE_FEAT_HCX */

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */
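
/* -----------------------------------------------------
 * Illustrative world-switch sketch (assumed caller
 * behaviour, with hypothetical 'old_ctx'/'new_ctx'
 * pointers): on a switch between security states the
 * outgoing world's EL2 context is saved before the
 * incoming one is restored, e.g.:
 *
 *	el2_sysregs_context_save(get_el2_sysregs_ctx(old_ctx));
 *	el2_sysregs_context_restore(get_el2_sysregs_ctx(new_ctx));
 * -----------------------------------------------------
 */
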
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save
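
/* ------------------------------------------------------------------
 * Illustrative usage (an assumption, mirroring the EL2 helpers
 * above): the EL1 sysreg area of a CPU context is obtained with the
 * 'get_el1_sysregs_ctx' accessor from context.h, e.g.:
 *
 *	el1_sysregs_context_save(get_el1_sysregs_ctx(cm_get_context(SECURE)));
 * ------------------------------------------------------------------
 */
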
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
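
/* ------------------------------------------------------------------
 * Illustrative sketch (an assumption about caller behaviour, not
 * enforced here): if EL3 ever did set CPTR_EL3.TFP, a C caller would
 * need to clear the trap before using the FP helpers above, e.g.
 * with the sysreg accessors from arch_helpers.h:
 *
 *	write_cptr_el3(read_cptr_el3() & ~TFP_BIT);
 *	isb();
 * ------------------------------------------------------------------
 */
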
	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter is not disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented, and if called from
 * Non-secure state saves PMCR_EL0 and disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 should be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */
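
/* ------------------------------------------------------------------
 * For reference, the PMCR_EL0 handling in the macro above behaves
 * like this C-style sketch (names mirror the assembly;
 * 'saved_pmcr_el0' stands for the CTX_PMCR_EL0 slot of the EL3
 * state context and is an illustrative name only):
 *
 *	if ((read_mdcr_el3() & (MDCR_SCCD_BIT | MDCR_MCCD_BIT)) == 0U) {
 *		uint64_t pmcr = read_pmcr_el0();
 *
 *		if ((read_scr_el3() & SCR_NS_BIT) != 0U)
 *			saved_pmcr_el0 = pmcr;	// save only for Non-secure
 *
 *		// disable cycle counting while event counting is prohibited
 *		write_pmcr_el0(pmcr | PMCR_EL0_DP_BIT);
 *		isb();
 *	}
 * ------------------------------------------------------------------
 */
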
/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state in preparation for entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers, then sets any of the PSTATE bits that are not set by
 * hardware according to the AArch64.TakeException pseudocode in the
 * Arm Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 should be restored from the
	 * non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage1 and
 * stage2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * Must follow the order below to disable the page table walk
	 * for lower ELs (EL1 and EL0). The first step ensures that the
	 * page table walk is disabled for stage1 and the second step
	 * ensures that the page table walker uses the TCR_EL1.EPDx
	 * bits to perform address translation. The ISB ensures that
	 * the CPU does these 2 steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable the page table walk
	 *    by stage1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
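
/*
 * Note: the inverse of the sequence above is performed by the
 * 'restore_ptw_el1_sys_regs' macro invoked from el3_exit below, which
 * (with ERRATA_SPECULATIVE_AT enabled) writes the SCTLR_EL1 and
 * TCR_EL1 values saved here back to their registers before the ERET.
 */
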
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit
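
/* ------------------------------------------------------------------
 * Illustrative pairing (an assumption about callers, not enforced in
 * this file): the runtime exception vectors are expected to call
 * 'prepare_el3_entry' on entry to EL3 to capture the lower EL's
 * general purpose, PMCR_EL0 and PAuth state, and to leave via
 * 'el3_exit' so that the same state is restored before the ERET to
 * the lower exception level.
 * ------------------------------------------------------------------
 */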