/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if ERRATA_SPECULATIVE_AT
	.global	save_and_update_ptw_el1_sys_regs
#endif /* ERRATA_SPECULATIVE_AT */

	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (temporary caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

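/*
 * Usage sketch (illustrative only, not assembled): with SP_EL3
 * pointing at the cpu_context of the world being switched out, a
 * caller would typically pass the address of the embedded 'fp_regs'
 * member, e.g.:
 *
 *	add	x0, sp, #CTX_FPREGS_OFFSET
 *	bl	fpregs_context_save
 *
 * CTX_FPREGS_OFFSET is assumed here to be the fp_regs offset exported
 * by context.h when CTX_INCLUDE_FPREGS is set.
 */
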
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (temporary caller-saved registers) to restore the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB is required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set the SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken, as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635, to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If the Data Independent Timing (DIT) functionality is
	 * implemented, always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT >= 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state and, if it is
 * FEAT_STATE_ENABLED/FEAT_STATE_CHECKED, performs an ID register check to
 * see whether the platform supports the MPAM extension before restoring
 * the MPAM3_EL3 register value.
 *
 * This is more complicated than for other extensions because MPAM support
 * cannot be detected by checking the status of a particular bit in the
 * MDCR_EL3 or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */

	.macro restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM >= 2
	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore the MPAM3_EL3 register as per the context state.
	 * Currently we only enable MPAM for the NS world and trap to
	 * EL3 on MPAM accesses from lower ELs of the Secure and Realm
	 * worlds.
	 * x9 holds the address of the per_world context.
	 * -----------------------------------------------------------
	 */

	ldr	x17, [x9, #CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif
	.endm /* restore_mpam3_el3 */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable the cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

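/*
 * Usage sketch (illustrative only, not assembled): an EL3 exception
 * vector would typically bracket its handler between this function
 * and el3_exit, roughly as below; the handler name is hypothetical.
 *
 *	bl	prepare_el3_entry
 *	...
 *	bl	some_el3_handler
 *	b	el3_exit
 */
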
/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

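/*
 * Caller sketch (illustrative only, not assembled): since x30 is used
 * to return from this function, callers must reload it themselves
 * afterwards, as el3_exit does below:
 *
 *	bl	restore_gp_pmcr_pauth_regs
 *	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 */
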
#if ERRATA_SPECULATIVE_AT
/* --------------------------------------------------------------------
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage1 and
 * stage2 page table walk.
 * --------------------------------------------------------------------
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage1 page table walk, and the second
	 * step forces the page table walker to use the TCR_EL1.EPDx
	 * bits when performing address translation. The ISB ensures
	 * that the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage1 page
	 *    table walk.
	 * 2. Enable the MMU bit to avoid identity mapping via stage2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb
	ret
endfunc save_and_update_ptw_el1_sys_regs

#endif /* ERRATA_SPECULATIVE_AT */

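/*
 * Note: the SCTLR_EL1 and TCR_EL1 values stashed above are put back by
 * the companion restore_ptw_el1_sys_regs macro, which el3_exit invokes
 * below; that macro is assumed to come from the common macro headers
 * included at the top of this file rather than being defined here.
 */
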
/* -----------------------------------------------------------------
 * The macro below returns the address of the per_world context for
 * the security state, retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm

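/*
 * In effect (a sketch, assuming per_world_context is an array of
 * per-world records, each of size CTX_PERWORLD_EL3STATE_END -
 * CTX_CPTR_EL3, indexed by security state):
 *
 *	_reg = per_world_context +
 *		security_state * (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
 */
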
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20	/* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */

	/* --------------------------------------------------------------
	 * Restore MDCR_EL3, SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * --------------------------------------------------------------
	 */
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldr	x19, [sp, #CTX_EL3STATE_OFFSET + CTX_MDCR_EL3]
	msr	spsr_el3, x16
	msr	elr_el3, x17
	msr	scr_el3, x18
	msr	mdcr_el3, x19

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit