/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if ENABLE_SPE_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_spe
	.global	el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_MPAM_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_mpam
	.global	el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#if ENABLE_FEAT_ECV
	.global	el2_sysregs_context_save_ecv
	.global	el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
	.global	el2_sysregs_context_save_vhe
	.global	el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
	.global	el2_sysregs_context_save_nv2
	.global	el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_TRF_FOR_NS
	.global	el2_sysregs_context_save_trf
	.global	el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */
#if ENABLE_FEAT_CSV2_2
	.global	el2_sysregs_context_save_csv2
	.global	el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save/restore EL2 system register context.
 * el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The rest of the functions save and
 * restore EL2 system registers that are present when a
 * particular feature is enabled. All functions assume
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * where the register context will be saved/restored.
 *
 * The following registers are not added.
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common
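
/* -----------------------------------------------------
 * Note: the stp/ldp pairing above assumes that each
 * pair of registers occupies two consecutive 8-byte
 * slots in the 'el2_sys_regs' structure (e.g. the
 * AFSR0_EL2 slot immediately follows CTX_ACTLR_EL2).
 * Any reordering of the CTX_*_EL2 offsets in context.h
 * would therefore have to be mirrored here, or the
 * paired accesses replaced with single str/ldr ones.
 * -----------------------------------------------------
 */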

#if ENABLE_SPE_FOR_LOWER_ELS
func el2_sysregs_context_save_spe
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
	ret
endfunc el2_sysregs_context_save_spe

func el2_sysregs_context_restore_spe
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
	ret
endfunc el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
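
/* -----------------------------------------------------
 * Note on the MPAM virtual-partition save/restore below:
 * the MPAMVPM1_EL2..MPAMVPM7_EL2 accesses are laid out
 * as identically sized blocks behind a computed branch,
 * so only the registers up to MPAMIDR_EL1.VPMR_MAX are
 * touched. As a worked example (assuming
 * MPAMIDR_EL1_VPMR_MAX_POSSIBLE == 7, matching the eight
 * MPAMVPM<n>_EL2 registers), VPMR_MAX == 3 gives
 * (7 - 3) = 4 blocks to skip, so 'br x10' lands on the
 * MPAMVPM3_EL2 block and execution falls through VPM3,
 * VPM2 and VPM1 before reaching the 'ret' at label 3.
 * With BTI enabled, each block starts with a 'bti j'
 * landing pad, since 'br x10' is an indirect branch.
 * -----------------------------------------------------
 */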

#if ENABLE_MPAM_FOR_LOWER_ELS
func el2_sysregs_context_save_mpam
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x10, MPAMIDR_EL1

	/*
	 * The context registers that we intend to save would be part of the
	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
	 */
	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f

	/*
	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to save
	 * the context of these registers.
	 */
	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPMV_EL2
	str	x13, [x0, #CTX_MPAMVPMV_EL2]

	/*
	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
	 * VPMR value. Proceed to save the context of registers from
	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From the MPAM
	 * spec, VPMR_MAX should not be zero if HAS_HCR == 1.
	 */
	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT, \
		#MPAMIDR_EL1_VPMR_MAX_WIDTH

	/*
	 * Once VPMR_MAX has been identified, calculate the offset relative to
	 * PC to jump to so that the relevant context can be saved. The offset
	 * is calculated as (VPMR_POSSIBLE_MAX - VPMR_MAX) * (instruction size
	 * for saving one VPM register) + (absolute address of label "1").
	 */
	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
	sub	w10, w11, w10

	/* Calculate the size of one block of MPAMVPM*_EL2 save */
	adr	x11, 1f
	adr	x12, 2f
	sub	x12, x12, x11

	madd	x10, x10, x12, x11
	br	x10

	/*
	 * The branch above would land properly on one of the blocks following
	 * label "1". Make sure that the order of save is retained.
	 */
1:
#if ENABLE_BTI
	bti	j
#endif
	mrs	x10, MPAMVPM7_EL2
	str	x10, [x0, #CTX_MPAMVPM7_EL2]
2:
#if ENABLE_BTI
	bti	j
#endif
	mrs	x11, MPAMVPM6_EL2
	str	x11, [x0, #CTX_MPAMVPM6_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x12, MPAMVPM5_EL2
	str	x12, [x0, #CTX_MPAMVPM5_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x13, MPAMVPM4_EL2
	str	x13, [x0, #CTX_MPAMVPM4_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x14, MPAMVPM3_EL2
	str	x14, [x0, #CTX_MPAMVPM3_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x15, MPAMVPM2_EL2
	str	x15, [x0, #CTX_MPAMVPM2_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x16, MPAMVPM1_EL2
	str	x16, [x0, #CTX_MPAMVPM1_EL2]

3:	ret
endfunc el2_sysregs_context_save_mpam
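
/* -----------------------------------------------------
 * Note: the restore path below intentionally mirrors
 * the save path. The same MPAMIDR_EL1.HAS_HCR check
 * (tbz to label 3) means that on parts without the HCR
 * extension only MPAM2_EL2 is context-switched, and the
 * same computed-branch dispatch is reused, so the VPM
 * block ordering must stay in sync with the save side.
 * -----------------------------------------------------
 */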

func el2_sysregs_context_restore_mpam
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	mrs	x10, MPAMIDR_EL1
	/*
	 * The context registers that we intend to restore would be part of the
	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
	 */
	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f

	/*
	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to restore
	 * the context of these registers.
	 */
	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldr	x13, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x13

	/*
	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
	 * VPMR value. Proceed to restore the context of registers from
	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From the MPAM
	 * spec, VPMR_MAX should not be zero if HAS_HCR == 1.
	 */
	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT, \
		#MPAMIDR_EL1_VPMR_MAX_WIDTH

	/*
	 * Once VPMR_MAX has been identified, calculate the offset relative to
	 * PC to jump to so that the relevant context can be restored. The
	 * offset is calculated as (VPMR_POSSIBLE_MAX - VPMR_MAX) *
	 * (instruction size for restoring one VPM register) + (absolute
	 * address of label "1").
	 */
	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
	sub	w10, w11, w10

	/* Calculate the size of one block of MPAMVPM*_EL2 restore */
	adr	x11, 1f
	adr	x12, 2f
	sub	x12, x12, x11

	madd	x10, x10, x12, x11
	br	x10

	/*
	 * The branch above would land properly on one of the blocks following
	 * label "1". Make sure that the order of restore is retained.
	 */
1:
#if ENABLE_BTI
	bti	j
#endif
	ldr	x10, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x10
2:
#if ENABLE_BTI
	bti	j
#endif
	ldr	x11, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x11

#if ENABLE_BTI
	bti	j
#endif
	ldr	x12, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x12

#if ENABLE_BTI
	bti	j
#endif
	ldr	x13, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x13

#if ENABLE_BTI
	bti	j
#endif
	ldr	x14, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x14

#if ENABLE_BTI
	bti	j
#endif
	ldr	x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x15

#if ENABLE_BTI
	bti	j
#endif
	ldr	x16, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x16

3:	ret
endfunc el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_ECV
func el2_sysregs_context_save_ecv
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
	ret
endfunc el2_sysregs_context_save_ecv

func el2_sysregs_context_restore_ecv
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
	ret
endfunc el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
func el2_sysregs_context_save_vhe
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	ret
endfunc el2_sysregs_context_save_vhe

func el2_sysregs_context_restore_vhe
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
	ret
endfunc el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
	ret
endfunc el2_sysregs_context_save_nv2

func el2_sysregs_context_restore_nv2
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
	ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
func el2_sysregs_context_save_trf
	/*
	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
	 */
	mrs	x12, TRFCR_EL2
	str	x12, [x0, #CTX_TRFCR_EL2]
	ret
endfunc el2_sysregs_context_save_trf

func el2_sysregs_context_restore_trf
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
	ret
endfunc el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
func el2_sysregs_context_save_csv2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
	ret
endfunc el2_sysregs_context_save_csv2

func el2_sysregs_context_restore_csv2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
	ret
endfunc el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */

#endif /* CTX_INCLUDE_EL2_REGS */
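
/* ------------------------------------------------------------------
 * Note: when ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
 * are deliberately excluded from el1_sysregs_context_save/restore
 * below. They are handled instead by save_and_update_ptw_el1_sys_regs
 * (and the matching restore_ptw_el1_sys_regs on the el3_exit path),
 * which parks the EL1 translation regime while executing at EL3 so
 * that speculative AT instructions cannot consume stale EL1
 * translation registers.
 * ------------------------------------------------------------------
 */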

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
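
/* ------------------------------------------------------------------
 * Note: the FP/SIMD context below covers the full Q0-Q31 register
 * file plus FPSR/FPCR, i.e. 32 x 16 bytes = 512 bytes of state per
 * saved context (plus FPEXC32_EL2 for AArch32 lower ELs), which is
 * one reason this save/restore is compile-time gated behind
 * CTX_INCLUDE_FPREGS rather than performed unconditionally.
 * ------------------------------------------------------------------
 */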

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, so it is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, so it is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * does not need to be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
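
	/* ----------------------------------------------------------
	 * Note: MDCR_EL3.SCCD and MDCR_EL3.MCCD are RES0 when
	 * FEAT_PMUv3p5 / FEAT_PMUv3p7 (respectively) are not
	 * implemented, so the earlier write of 1 to them reads back
	 * as 0. The tst below therefore doubles as a feature probe:
	 * if neither bit sticks, cycle counting cannot be prohibited
	 * via MDCR_EL3 and PMCR_EL0 has to be switched by hand.
	 * ----------------------------------------------------------
	 */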

	/* ----------------------------------------------------------
	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * has failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set, Prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set, Secure Cycle Counter Disable. Prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If these bits are not set, it means that FEAT_PMUv3p5/7 is
	 * not implemented and PMCR_EL0 should be saved in the
	 * non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, it ensures the Secure Cycle
	 * Counter (PMCCNTR_EL0) is not prohibited from counting at
	 * EL3 and in Secure state.
	 * Henceforth, PMCR_EL0 is to be saved before the world
	 * switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry
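
/* ------------------------------------------------------------------
 * Note: prepare_el3_entry and restore_gp_pmcr_pauth_regs/el3_exit
 * are intended to bracket every trip through EL3, so the general
 * purpose, PMCR_EL0 and PAuth state is switched symmetrically on the
 * way in and on the way back out.
 * ------------------------------------------------------------------
 */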

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
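
/*
 * Note: the parked TCR_EL1/SCTLR_EL1 values programmed below stay
 * live for the whole stay in EL3; the real values saved at
 * CTX_SCTLR_EL1/CTX_TCR_EL1 are put back by restore_ptw_el1_sys_regs
 * on the el3_exit path.
 */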

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR_EL3 is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */
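
/* ------------------------------------------------------------------
 * Illustrative C-style sketch of the conditional ZCR_EL3 restore
 * above; 'ctx_cptr_el3' and 'ctx_zcr_el3' are hypothetical
 * stand-ins for the values saved in the EL3 context. The assembly
 * writes ZCR_EL3 through its encoded name S3_6_C1_C2_0 so that the
 * file also builds with assemblers that lack SVE support.
 *
 *	write_cptr_el3(ctx_cptr_el3);
 *	if ((ctx_cptr_el3 & CPTR_EZ_BIT) != 0U) {
 *		isb();	ensure the CPTR_EL3 write has taken effect
 *		write_zcr_el3(ctx_zcr_el3);
 *	}
 * ------------------------------------------------------------------
 */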

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore the mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue an Error Synchronization Barrier to synchronize
	 * SErrors before exiting EL3. We're running with EAs
	 * unmasked, so any synchronized errors would be taken
	 * immediately; therefore there is no need to inspect the
	 * DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

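/* ------------------------------------------------------------------
 * Illustrative C-style sketch of the ERET state restore above,
 * using TF-A's context accessors on a hypothetical context handle
 * 'ctx'. SPSR_EL3 selects the target EL and execution state,
 * ELR_EL3 the return address, and SCR_EL3.NS the security state
 * being entered.
 *
 *	el3_state_t *state = get_el3state_ctx(ctx);
 *
 *	write_scr_el3(read_ctx_reg(state, CTX_SCR_EL3));
 *	write_spsr_el3(read_ctx_reg(state, CTX_SPSR_EL3));
 *	write_elr_el3(read_ctx_reg(state, CTX_ELR_EL3));
 * ------------------------------------------------------------------
 */
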
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit
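
/* ------------------------------------------------------------------
 * Flow summary for el3_exit: stash the EL3 runtime stack pointer
 * (SP_EL0) in the context, restore CPTR_EL3 (and ZCR_EL3 when SVE
 * is enabled), re-apply the saved CVE-2018-3639 mitigation state,
 * synchronize outstanding errors (esb, or dsb sy without
 * RAS_EXTENSION), restore SCR_EL3/SPSR_EL3/ELR_EL3, undo the
 * ERRATA_SPECULATIVE_AT page table walk changes, restore the
 * GP/PMCR_EL0/PAuth registers, and ERET to the lower EL.
 * ------------------------------------------------------------------
 */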