/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS
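
/* -----------------------------------------------------
 * A minimal sketch of how the EL2 save/restore pair
 * below is typically driven from C. Illustration only,
 * assuming the context-management helpers declared in
 * context.h and the context management code; the real
 * call sites may differ:
 *
 *	cpu_context_t *ctx = cm_get_context(NON_SECURE);
 *	el2_sysregs_context_save(get_el2_sysregs_ctx(ctx));
 *	...
 *	el2_sysregs_context_restore(get_el2_sysregs_ctx(ctx));
 * -----------------------------------------------------
 */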

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, dbgvcr32_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
#else
	str	x11, [x0, #CTX_ELR_EL2]
#endif

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]
#else
	str	x15, [x0, #CTX_MDCR_EL2]
#endif

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

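	/*
	 * Note: the same pattern repeats throughout this function.
	 * Registers are read in pairs and written with a single stp,
	 * which works because the CTX_* offsets defined in context.h
	 * place each pair in adjacent 8-byte slots; the x9-x17
	 * temporaries are simply rotated.
	 */
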
	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

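	/*
	 * The ARM_ARCH_AT_LEAST() guards below are resolved at build
	 * time: the guarded accesses are only assembled when the build
	 * targets at least that architecture revision, so no runtime
	 * feature detection happens here.
	 */
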
#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]
#endif

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to restore the EL2 system register context. It
 * assumes that 'x0' points to an 'el2_sys_regs'
 * structure from which the register context will be
 * restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

#if CTX_INCLUDE_AARCH32_REGS
	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
#else
	ldr	x11, [x0, #CTX_ELR_EL2]
#endif
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

#if ENABLE_SPE_FOR_LOWER_ELS
	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	PMSCR_EL2, x16
#else
	ldr	x15, [x0, #CTX_MDCR_EL2]
#endif
	msr	mdcr_el2, x15

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11
#endif

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

#if CTX_INCLUDE_NEVE_REGS
	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14
#endif

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif

	/* Restore NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	/* Restore MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

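/* ------------------------------------------------------------------
 * Note that SP_EL0 is deliberately absent from the EL1 context
 * handled above: it is captured alongside the general purpose
 * registers by save_gp_pmcr_pauth_regs() further down, as part of
 * the 'gp_regs' area of the CPU context.
 * ------------------------------------------------------------------
 */
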
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume that the bit is cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

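/*
 * Note: q0-q31 make up the full Advanced SIMD and floating-point
 * register file, so the pair of functions in this block saves and
 * restores vector state as well as scalar FP state.
 */
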
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from which the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume that the bit is cleared.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to switch to
	 * secure EL1 or the non-secure world covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following functions are used to save and restore all the
 * general purpose and ARMv8.3-PAuth (if enabled) registers.
 * They also check whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; when entered from the
 * Non-secure state, PMCR_EL0 is saved and the Cycle Counter is
 * disabled.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros, to ensure their invocation fits within the
 * 32 instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
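
/* ------------------------------------------------------------------
 * Illustrative use from an exception vector (a simplified sketch;
 * the actual vector code lives in runtime_exceptions.S and does
 * more work around this call):
 *
 *	vector_entry sync_exception_aarch64
 *		...
 *		bl	save_gp_pmcr_pauth_regs
 *		...
 * ------------------------------------------------------------------
 */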
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
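	/*
	 * From this point x18 can be reused as a scratch register:
	 * its original value was captured by the stp above, which is
	 * why the header comment advertises it as the only clobber.
	 */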
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check the caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from the Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable the cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
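
/* ------------------------------------------------------------------
 * Typical call sequence, as used by el3_exit below; the caller
 * reloads x30 itself because this function returns through it:
 *
 *	bl	restore_gp_pmcr_pauth_regs
 *	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 * ------------------------------------------------------------------
 */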
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to the Non-secure state if
	 * the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Returning to the Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * When ERRATA_SPECULATIVE_AT is enabled, save the SCTLR_EL1 and
 * TCR_EL1 registers, then update the EL1 registers to disable the
 * stage 1 and stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The two steps below must be performed in this order to
	 * disable page table walks for the lower ELs (EL1 and EL0):
	 * the first step disables the stage 1 walk, and the second
	 * forces the page table walker to honour the TCR_EL1.EPDx
	 * bits when performing address translation. The ISB ensures
	 * that the CPU carries out the two steps in order.
	 *
	 * 1. Set the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Set the SCTLR_EL1.M bit to avoid an identity mapping via
	 *    stage 2 and force TCR_EL1.EPDx to be used by the page
	 *    table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

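/*
 * The inverse operation, restore_ptw_el1_sys_regs, is a macro
 * (assumed to come from the el3_common_macros.S include above) and
 * is invoked from el3_exit below, before the general purpose
 * registers are restored.
 */
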
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore the mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore the general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue an Error Synchronization Barrier to synchronize
	 * SErrors before exiting EL3. We're running with EAs
	 * unmasked, so any synchronized errors would be taken
	 * immediately; therefore there is no need to inspect the
	 * DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif
#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
	exception_return

endfunc el3_exit