/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit
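
/* ------------------------------------------------------------------
 * Illustrative (hypothetical) call site for the save/restore pairs
 * below; the real callers live in the EL3 context management code.
 * This assumes 'x9' holds a pointer to the CPU context and that
 * context.h provides an offset constant for the relevant member
 * (shown here as CTX_EL1_SYSREGS_OFFSET purely for illustration):
 *
 *	add	x0, x9, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_save
 * ------------------------------------------------------------------
 */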

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

func el2_sysregs_context_save
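	/* The registers below are read with 'mrs' in pairs and written
	 * back with a single 'stp', so each store fills two consecutive
	 * 64-bit slots of the context; the CTX_* offsets used as store
	 * addresses are defined to match this pairing in context.h.
	 */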
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x10, dbgvcr32_el2
	mrs	x11, elr_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, fpexc32_el2
	mrs	x17, hacr_el2
	stp	x16, x17, [x0, #CTX_FPEXC32_EL2]

	mrs	x9, hcr_el2
	mrs	x10, hpfar_el2
	stp	x9, x10, [x0, #CTX_HCR_EL2]

	mrs	x11, hstr_el2
	mrs	x12, ICC_SRE_EL2
	stp	x11, x12, [x0, #CTX_HSTR_EL2]

	mrs	x13, ICH_HCR_EL2
	mrs	x14, ICH_VMCR_EL2
	stp	x13, x14, [x0, #CTX_ICH_HCR_EL2]

	mrs	x15, mair_el2
	mrs	x16, mdcr_el2
	stp	x15, x16, [x0, #CTX_MAIR_EL2]

	mrs	x17, PMSCR_EL2
	mrs	x9, sctlr_el2
	stp	x17, x9, [x0, #CTX_PMSCR_EL2]

	mrs	x10, spsr_el2
	mrs	x11, sp_el2
	stp	x10, x11, [x0, #CTX_SPSR_EL2]

	mrs	x12, tcr_el2
	mrs	x13, tpidr_el2
	stp	x12, x13, [x0, #CTX_TCR_EL2]

	mrs	x14, ttbr0_el2
	mrs	x15, vbar_el2
	stp	x14, x15, [x0, #CTX_TTBR0_EL2]

	mrs	x16, vmpidr_el2
	mrs	x17, vpidr_el2
	stp	x16, x17, [x0, #CTX_VMPIDR_EL2]

	mrs	x9, vtcr_el2
	mrs	x10, vttbr_el2
	stp	x9, x10, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x11, TFSR_EL2
	str	x11, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
/* Set the EPD0 and EPD1 bits and clear the M bit to disable PTW */
	mrs	x9, hcr_el2
	tst	x9, #HCR_E2H_BIT
	bne	1f
	mrs	x9, tcr_el2
	orr	x9, x9, #TCR_EPD0_BIT
	orr	x9, x9, #TCR_EPD1_BIT
	msr	tcr_el2, x9
1:	mrs	x9, sctlr_el2
	bic	x9, x9, #SCTLR_M_BIT
	msr	sctlr_el2, x9
	isb
#endif
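	/* ------------------------------------------------------------
	 * With page table walks and the MMU disabled for the EL2
	 * translation regime, a speculative AT instruction cannot walk
	 * a partially restored context; this is why SCTLR_EL2 and
	 * TCR_EL2 are deliberately restored last, behind an ISB, at
	 * the end of this function.
	 * ------------------------------------------------------------
	 */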

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_FPEXC32_EL2]
	msr	fpexc32_el2, x16
	msr	hacr_el2, x17

	ldp	x9, x10, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x9
	msr	hpfar_el2, x10

	ldp	x11, x12, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x11
	msr	ICC_SRE_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x13
	msr	ICH_VMCR_EL2, x14

	ldp	x15, x16, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x15
	msr	mdcr_el2, x16

	ldr	x17, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x17

	ldp	x10, x11, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x10
	msr	sp_el2, x11

	ldr	x12, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12

	ldp	x14, x15, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x14
	msr	vbar_el2, x15

	ldp	x16, x17, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x16
	msr	vpidr_el2, x17

	ldp	x9, x10, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x9
	msr	vttbr_el2, x10

#if CTX_INCLUDE_MTE_REGS
	ldr	x11, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x11
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x9, x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x9
	msr	MPAMHCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x11
	msr	MPAMVPM1_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x13
	msr	MPAMVPM3_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x15
	msr	MPAMVPM5_EL2, x16

	ldp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x17
	msr	MPAMVPM7_EL2, x9

	ldr	x10, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x10
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x11, x12, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x11
	msr	HDFGRTR_EL2, x12

	ldp	x13, x14, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x13
	msr	HFGITR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x15
	msr	HFGWTR_EL2, x16

	ldr	x17, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x9
	msr	cnthps_cval_el2, x10

	ldp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x11
	msr	cnthvs_ctl_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x13
	msr	cnthvs_tval_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x15
	msr	cnthv_cval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x17
	msr	contextidr_el2, x9

	ldr	x10, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x10

	ldr	x11, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x11

	ldr	x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x12

	ldr	x13, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x13

	ldr	x14, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x14

	ldr	x15, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x15

	ldr	x16, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x16

	ldr	x17, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x9, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x9
#endif

#if ERRATA_SPECULATIVE_AT
/*
 * Make sure all registers are restored successfully except
 * SCTLR_EL2 and TCR_EL2
 */
	isb
#endif

	ldr	x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x9
	ldr	x9, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
	mrs	x9, tcr_el1
	orr	x9, x9, #TCR_EPD0_BIT
	orr	x9, x9, #TCR_EPD1_BIT
	msr	tcr_el1, x9
	mrs	x9, sctlr_el1
	bic	x9, x9, #SCTLR_M_BIT
	msr	sctlr_el1, x9
	isb
#endif
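	/* ------------------------------------------------------------
	 * As in the EL2 variant, the sequence above disables page
	 * table walks and the MMU for the EL1 translation regime so
	 * that a speculative AT instruction cannot walk a partially
	 * restored context; SCTLR_EL1 and TCR_EL1 are restored last,
	 * behind an ISB, at the end of this function.
	 * ------------------------------------------------------------
	 */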

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldr	x16, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldr	x16, [x0, #CTX_TPIDR_EL1]
	msr	tpidr_el1, x16

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

#if ERRATA_SPECULATIVE_AT
/*
 * Make sure all registers are restored successfully except
 * SCTLR_EL1 and TCR_EL1
 */
	isb
#endif

	ldr	x9, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x9
	ldr	x9, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x9

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save
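
/* ------------------------------------------------------------------
 * Illustrative (hypothetical) guard a caller could add if EL3 ever
 * ran with the FP trap enabled; it is not needed in the current flow
 * since CPTR_EL3.TFP is assumed to be clear:
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 * ------------------------------------------------------------------
 */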

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not, and the
 * function is called from Non-secure state, it saves PMCR_EL0 and
 * disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
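
	/* ----------------------------------------------------------
	 * For reference: PMCR_EL0.DP, set below when MDCR_EL3.SCCD is
	 * unavailable, stops PMCCNTR_EL0 from counting in regions
	 * where event counting is prohibited, which is what keeps the
	 * cycle counter from running while executing in Secure state.
	 * ----------------------------------------------------------
	 */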
	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif
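
	/* ----------------------------------------------------------
	 * Reminder of the EL3 stack convention assumed here: SP_EL0
	 * holds the EL3 C runtime stack while SP_EL3 points to the
	 * CPU context of the lower exception level, which is why the
	 * CTX_* offsets below are applied to 'sp' only after switching
	 * to SP_EL3.
	 * ----------------------------------------------------------
	 */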
	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit
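
/* ------------------------------------------------------------------
 * Illustrative (hypothetical) use of el3_exit: a runtime service
 * handler writes its return values into the saved GP registers of
 * the context and then simply branches here, since el3_exit itself
 * restores x30 from the context, e.g.:
 *
 *	...
 *	b	el3_exit
 * ------------------------------------------------------------------
 */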