/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' points to an 'el2_sys_regs' structure where the
 * register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, dbgvcr32_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
#else
	str	x11, [x0, #CTX_ELR_EL2]
#endif

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]
#endif

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save
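
/* -----------------------------------------------------
 * Illustrative sketch (not part of the original file):
 * a caller is expected to materialize 'x0' as a pointer
 * to the 'el2_sys_regs' area of a CPU context and then
 * branch with link, relying on only x9-x17 (the AAPCS64
 * temporaries) being clobbered. The wrapper below is a
 * minimal, hypothetical example; the x19 context pointer
 * and a usable stack are assumptions.
 * -----------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
func example_save_el2_state
	stp	x29, x30, [sp, #-16]!	/* keep frame pointer and link register */
	mov	x29, sp
	mov	x0, x19			/* x19 = &'el2_sys_regs' area (assumed) */
	bl	el2_sysregs_context_save	/* clobbers only x9-x17 per the PCS */
	ldp	x29, x30, [sp], #16
	ret
endfunc example_save_el2_state
#endif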

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure from
 * which the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

#if CTX_INCLUDE_AARCH32_REGS
	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
#else
	ldr	x11, [x0, #CTX_ELR_EL2]
#endif
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	mdcr_el2, x15
	msr	PMSCR_EL2, x16

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11
#endif

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */
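
/* -----------------------------------------------------
 * Illustrative sketch (not part of the original file):
 * across a world switch the two routines above are meant
 * to be used as a pair: save the outgoing world's EL2
 * state, then restore the incoming world's. The context
 * pointers in x19/x20 are assumptions for the example.
 * -----------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	mov	x0, x19			/* x19 = outgoing 'el2_sys_regs' (assumed) */
	bl	el2_sysregs_context_save
	mov	x0, x20			/* x20 = incoming 'el2_sys_regs' (assumed) */
	bl	el2_sysregs_context_restore
#endif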

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save
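
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): when
 * ERRATA_SPECULATIVE_AT is enabled, sctlr_el1/tcr_el1 are deliberately
 * skipped above and are captured instead by
 * save_and_update_ptw_el1_sys_regs (defined later in this file), so a
 * hypothetical EL3 entry path would combine the two calls. The x19
 * context pointer and SP holding the cpu_context base are assumptions
 * for the example.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	/* SP is assumed to hold the cpu_context base here */
	bl	save_and_update_ptw_el1_sys_regs	/* saves sctlr_el1/tcr_el1, clobbers x29 */
	mov	x0, x19			/* x19 = &'el1_sys_regs' area (assumed) */
	bl	el1_sysregs_context_save	/* saves the rest of the EL1 state */
#endif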

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
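
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): a typical
 * return path restores the incoming world's EL1 state and then leaves
 * EL3 through el3_exit, whose ERET provides the context
 * synchronization that makes an explicit ISB unnecessary here. The
 * x20 pointer is an assumption for the example.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	mov	x0, x20			/* x20 = incoming 'el1_sys_regs' (assumed) */
	bl	el1_sysregs_context_restore
	b	el3_exit		/* ends in ERET, which synchronizes the context */
#endif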

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64 to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0'
 * points to an 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save
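
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): if a future
 * configuration did set CPTR_EL3.TFP, a caller would have to clear
 * the trap and synchronize before touching the Q registers, roughly
 * as below. TFP_BIT is assumed to be the CPTR_EL3.TFP mask from
 * arch.h; the x0 set-up is assumed to be done by the caller.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	mrs	x9, cptr_el3
	bic	x9, x9, #TFP_BIT	/* stop FP/SIMD accesses from trapping to EL3 */
	msr	cptr_el3, x9
	isb				/* make the new CPTR_EL3 value visible */
	bl	fpregs_context_save	/* x0 = &'fp_regs' area (assumed set up) */
#endif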

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64 to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0'
 * points to an 'fp_regs' structure from which the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
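
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): since the two
 * routines above only exist when CTX_INCLUDE_FPREGS=1, any call site
 * has to be guarded by the same build option. The x19/x20 pointers
 * are assumptions for the example.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
#if CTX_INCLUDE_FPREGS
	mov	x0, x19			/* x19 = outgoing 'fp_regs' (assumed) */
	bl	fpregs_context_save
	mov	x0, x20			/* x20 = incoming 'fp_regs' (assumed) */
	bl	fpregs_context_restore
#endif
#endif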

/* ------------------------------------------------------------------
 * The following function is used to save all the general purpose
 * and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not, and the
 * caller came from Non-secure state, it saves PMCR_EL0 and disables
 * the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8-PMU is not implemented and
	 * PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs
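
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): from an
 * exception vector the routine above is reached with SP_EL3 already
 * pointing at the cpu_context, so the whole prologue costs little of
 * the 32-instruction vector budget. A hypothetical vector entry might
 * therefore begin as below; saving x30 first is needed because the
 * bl itself clobbers it.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	/* On exception entry SPSel = 1, i.e. SP = SP_EL3 = &cpu_context */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]	/* bl below clobbers x30 */
	bl	save_gp_pmcr_pauth_regs	/* clobbers x18; saves x0-x29 and SP_EL0 */
	/* ... handle the exception on the EL3 runtime stack ... */
#endif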

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8-PMU is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
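
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): because the
 * routine above returns through x30, it cannot restore x30 itself;
 * the caller reloads it from the context afterwards, exactly as the
 * el3_exit path at the end of this file does.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]	/* caller restores x30 */
	eret
#endif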

/*
 * In the case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and
 * TCR_EL1 registers and then update them to disable the stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk, and the second
	 * step forces the page table walker to use the TCR_EL1.EPDx
	 * bits for address translation. The ISB ensures that the CPU
	 * performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the page table
	 *    walk by stage 1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
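
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): the inverse
 * step on the exit path is the 'restore_ptw_el1_sys_regs' macro
 * (pulled in from el3_common_macros.S and invoked in el3_exit below).
 * Conceptually it reloads the two registers saved above; the snippet
 * here is a minimal sketch of that idea, not the macro's actual body.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	ldr	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	msr	sctlr_el1, x29		/* undo the SCTLR_EL1.M change */
	ldr	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]
	msr	tcr_el1, x29		/* undo the TCR_EL1.EPDx changes */
	isb
#endif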

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the GP regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit
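
/* ------------------------------------------------------------------
 * Illustrative sketch (not part of the original file): a hypothetical
 * runtime-service epilogue. The handler runs on the SP_EL0 runtime
 * stack (as the assertion in el3_exit expects), writes its return
 * value into the saved x0 slot of the context, and branches to
 * el3_exit. The x19 context pointer is an assumption for the example.
 * ------------------------------------------------------------------
 */
#if 0	/* illustrative only, never assembled */
	/* x19 = &cpu_context, i.e. the value held by SP_EL3 (assumed) */
	str	x0, [x19, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]	/* retval for the lower EL */
	b	el3_exit
#endif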