/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>
#include <platform_def.h>

#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if CTX_INCLUDE_SVE_REGS
	.global	sve_context_save
	.global	sve_context_restore
#endif /* CTX_INCLUDE_SVE_REGS */

#if ERRATA_SPECULATIVE_AT
	.global	save_and_update_ptw_el1_sys_regs
#endif /* ERRATA_SPECULATIVE_AT */

	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

/* The following macros are used if either CTX_INCLUDE_FPREGS or CTX_INCLUDE_SVE_REGS is enabled */
#if CTX_INCLUDE_FPREGS || CTX_INCLUDE_SVE_REGS
.macro fpregs_state_save base:req hold:req
	mrs	\hold, fpsr
	str	\hold, [\base, #CTX_SIMD_FPSR]

	mrs	\hold, fpcr
	str	\hold, [\base, #CTX_SIMD_FPCR]

#if CTX_INCLUDE_AARCH32_REGS && CTX_INCLUDE_FPREGS
	mrs	\hold, fpexc32_el2
	str	\hold, [\base, #CTX_SIMD_FPEXC32]
#endif
.endm

.macro fpregs_state_restore base:req hold:req
	ldr	\hold, [\base, #CTX_SIMD_FPSR]
	msr	fpsr, \hold

	ldr	\hold, [\base, #CTX_SIMD_FPCR]
	msr	fpcr, \hold

#if CTX_INCLUDE_AARCH32_REGS && CTX_INCLUDE_FPREGS
	ldr	\hold, [\base, #CTX_SIMD_FPEXC32]
	msr	fpexc32_el2, \hold
#endif
.endm

#endif /* CTX_INCLUDE_FPREGS || CTX_INCLUDE_SVE_REGS */
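/*
 * Illustrative expansion (a sketch, not assembled here): with
 * CTX_INCLUDE_FPREGS enabled, "fpregs_state_save x0, x9" emits
 *	mrs	x9, fpsr
 *	str	x9, [x0, #CTX_SIMD_FPSR]
 *	mrs	x9, fpcr
 *	str	x9, [x0, #CTX_SIMD_FPCR]
 * plus the FPEXC32_EL2 save when AArch32 EL1 context is included.
 * The second argument is used purely as a scratch register.
 */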
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (temporary caller-saved registers), to save the
 * floating point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0], #32
	stp	q2, q3, [x0], #32
	stp	q4, q5, [x0], #32
	stp	q6, q7, [x0], #32
	stp	q8, q9, [x0], #32
	stp	q10, q11, [x0], #32
	stp	q12, q13, [x0], #32
	stp	q14, q15, [x0], #32
	stp	q16, q17, [x0], #32
	stp	q18, q19, [x0], #32
	stp	q20, q21, [x0], #32
	stp	q22, q23, [x0], #32
	stp	q24, q25, [x0], #32
	stp	q26, q27, [x0], #32
	stp	q28, q29, [x0], #32
	stp	q30, q31, [x0], #32

	fpregs_state_save x0, x9

	ret
endfunc fpregs_context_save
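/*
 * Note: each "stp qN, qM, [x0], #32" above stores a pair of 128-bit
 * Q registers and post-increments x0, so the 16 pairs advance x0 by
 * 512 bytes in total; fpregs_state_save then stores the status and
 * control registers relative to the updated pointer.
 */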
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (temporary caller-saved registers), to restore
 * the floating point register context. It assumes that 'x0' points
 * to a 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers nor
 * sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0], #32
	ldp	q2, q3, [x0], #32
	ldp	q4, q5, [x0], #32
	ldp	q6, q7, [x0], #32
	ldp	q8, q9, [x0], #32
	ldp	q10, q11, [x0], #32
	ldp	q12, q13, [x0], #32
	ldp	q14, q15, [x0], #32
	ldp	q16, q17, [x0], #32
	ldp	q18, q19, [x0], #32
	ldp	q20, q21, [x0], #32
	ldp	q22, q23, [x0], #32
	ldp	q24, q25, [x0], #32
	ldp	q26, q27, [x0], #32
	ldp	q28, q29, [x0], #32
	ldp	q30, q31, [x0], #32

	fpregs_state_restore x0, x9

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if CTX_INCLUDE_SVE_REGS
/*
 * Helper macros for SVE predicate save/restore operations.
 */
.macro sve_predicate_op op:req reg:req
	\op	p0, [\reg, #0, MUL VL]
	\op	p1, [\reg, #1, MUL VL]
	\op	p2, [\reg, #2, MUL VL]
	\op	p3, [\reg, #3, MUL VL]
	\op	p4, [\reg, #4, MUL VL]
	\op	p5, [\reg, #5, MUL VL]
	\op	p6, [\reg, #6, MUL VL]
	\op	p7, [\reg, #7, MUL VL]
	\op	p8, [\reg, #8, MUL VL]
	\op	p9, [\reg, #9, MUL VL]
	\op	p10, [\reg, #10, MUL VL]
	\op	p11, [\reg, #11, MUL VL]
	\op	p12, [\reg, #12, MUL VL]
	\op	p13, [\reg, #13, MUL VL]
	\op	p14, [\reg, #14, MUL VL]
	\op	p15, [\reg, #15, MUL VL]
.endm
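/*
 * The "#imm, MUL VL" addressing form scales the immediate offset by
 * the current vector length: by VL bytes for Z-register accesses and
 * by VL/8 bytes for predicate accesses, so each register occupies
 * exactly one vector-length-sized (or predicate-sized) slot.
 */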
.macro sve_vectors_op op:req reg:req
	\op	z0, [\reg, #0, MUL VL]
	\op	z1, [\reg, #1, MUL VL]
	\op	z2, [\reg, #2, MUL VL]
	\op	z3, [\reg, #3, MUL VL]
	\op	z4, [\reg, #4, MUL VL]
	\op	z5, [\reg, #5, MUL VL]
	\op	z6, [\reg, #6, MUL VL]
	\op	z7, [\reg, #7, MUL VL]
	\op	z8, [\reg, #8, MUL VL]
	\op	z9, [\reg, #9, MUL VL]
	\op	z10, [\reg, #10, MUL VL]
	\op	z11, [\reg, #11, MUL VL]
	\op	z12, [\reg, #12, MUL VL]
	\op	z13, [\reg, #13, MUL VL]
	\op	z14, [\reg, #14, MUL VL]
	\op	z15, [\reg, #15, MUL VL]
	\op	z16, [\reg, #16, MUL VL]
	\op	z17, [\reg, #17, MUL VL]
	\op	z18, [\reg, #18, MUL VL]
	\op	z19, [\reg, #19, MUL VL]
	\op	z20, [\reg, #20, MUL VL]
	\op	z21, [\reg, #21, MUL VL]
	\op	z22, [\reg, #22, MUL VL]
	\op	z23, [\reg, #23, MUL VL]
	\op	z24, [\reg, #24, MUL VL]
	\op	z25, [\reg, #25, MUL VL]
	\op	z26, [\reg, #26, MUL VL]
	\op	z27, [\reg, #27, MUL VL]
	\op	z28, [\reg, #28, MUL VL]
	\op	z29, [\reg, #29, MUL VL]
	\op	z30, [\reg, #30, MUL VL]
	\op	z31, [\reg, #31, MUL VL]
.endm
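/*
 * Illustrative expansion (a sketch): "sve_vectors_op str, x9" emits
 *	str	z0, [x9, #0, MUL VL]
 *	...
 *	str	z31, [x9, #31, MUL VL]
 * and instantiating the same macro with 'ldr' performs the matching
 * restore, which is why a single definition serves both directions.
 */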
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (temporary caller-saved registers), to save the
 * SVE register context. It assumes that 'x0' is pointing to a
 * 'sve_regs_t' structure to which the register context will be
 * saved.
 * ------------------------------------------------------------------
 */
func sve_context_save
.arch_extension sve
	/* Temporarily enable SVE */
	mrs	x10, cptr_el3
	orr	x11, x10, #CPTR_EZ_BIT
	bic	x11, x11, #TFP_BIT
	msr	cptr_el3, x11
	isb

	/* zcr_el3 */
	mrs	x12, S3_6_C1_C2_0
	mov	x13, #((SVE_VECTOR_LEN >> 7) - 1)
	msr	S3_6_C1_C2_0, x13
	isb

	/* Predicate registers */
	mov	x13, #CTX_SIMD_PREDICATES
	add	x9, x0, x13
	sve_predicate_op str, x9

	/* Save FFR after predicates */
	mov	x13, #CTX_SIMD_FFR
	add	x9, x0, x13
	rdffr	p0.b
	str	p0, [x9]

	/* Save vector registers */
	mov	x13, #CTX_SIMD_VECTORS
	add	x9, x0, x13
	sve_vectors_op str, x9

	/* Restore SVE enablement */
	msr	S3_6_C1_C2_0, x12 /* zcr_el3 */
	msr	cptr_el3, x10
	isb
.arch_extension nosve

	/* Save FPSR, FPCR and FPEXC32 */
	fpregs_state_save x0, x9

	ret
endfunc sve_context_save
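/*
 * Note: the low 128 bits of each SVE Z register architecturally
 * overlap the corresponding SIMD&FP V register, so saving Z0-Z31
 * above also captures Q0-Q31; only FPSR/FPCR (and FPEXC32_EL2 where
 * applicable) still require the separate fpregs_state_save call.
 */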
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (temporary caller-saved registers), to restore
 * the SVE register context. It assumes that 'x0' is pointing to a
 * 'sve_regs_t' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func sve_context_restore
.arch_extension sve
	/* Temporarily enable SVE for EL3 */
	mrs	x10, cptr_el3
	orr	x11, x10, #CPTR_EZ_BIT
	bic	x11, x11, #TFP_BIT
	msr	cptr_el3, x11
	isb

	/* zcr_el3 */
	mrs	x12, S3_6_C1_C2_0
	mov	x13, #((SVE_VECTOR_LEN >> 7) - 1)
	msr	S3_6_C1_C2_0, x13
	isb

	/* Restore FFR register before predicates */
	mov	x13, #CTX_SIMD_FFR
	add	x9, x0, x13
	ldr	p0, [x9]
	wrffr	p0.b

	/* Restore predicate registers */
	mov	x13, #CTX_SIMD_PREDICATES
	add	x9, x0, x13
	sve_predicate_op ldr, x9

	/* Restore vector registers */
	mov	x13, #CTX_SIMD_VECTORS
	add	x9, x0, x13
	sve_vectors_op ldr, x9

	/* Restore SVE enablement */
	msr	S3_6_C1_C2_0, x12 /* zcr_el3 */
	msr	cptr_el3, x10
	isb
.arch_extension nosve

	/* Restore FPSR, FPCR and FPEXC32 */
	fpregs_state_restore x0, x9
	ret
endfunc sve_context_restore
#endif /* CTX_INCLUDE_SVE_REGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm
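	/*
	 * Architecturally, SCR_EL3.EA = 1 routes External Aborts and
	 * SError exceptions to EL3 rather than to the EL they would
	 * otherwise target; this macro is invoked on every entry to
	 * EL3 from prepare_el3_entry below.
	 */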
	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state, performs an ID register
 * check to see if the platform supports the MPAM extension, and restores
 * the MPAM3 register value if it is FEAT_STATE_ENABLED/FEAT_STATE_CHECKED.
 *
 * This is more involved than for other extensions because MPAM support
 * cannot be determined from the status of a particular bit in the
 * MDCR_EL3 or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */

	.macro restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2

	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore MPAM3_EL3 register as per context state
	 * Currently we only enable MPAM for NS world and trap to EL3
	 * for MPAM access in lower ELs of Secure and Realm world
	 * x9 holds address of the per_world context
	 * -----------------------------------------------------------
	 */

	ldr	x17, [x9, #CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif
	.endm /* restore_mpam3_el3 */
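/*
 * Note on the "== 2" pattern above: a feature flag value of 2
 * (FEAT_STATE_CHECKED) means support is only known at runtime, hence
 * the ID_AA64PFR0_EL1.MPAM / ID_AA64PFR1_EL1.MPAM_frac reads; either
 * field being non-zero indicates some version of MPAM is implemented,
 * which is why the two values are simply OR-ed together before the
 * branch.
 */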
/* ------------------------------------------------------------------
 * The following macro is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
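	/*
	 * Setting PMCR_EL0.DP above stops PMCCNTR_EL0 from counting in
	 * regions where event counting is prohibited, so cycle counts
	 * cannot reveal EL3 execution timing; the ISB ensures the
	 * write has taken effect before execution continues.
	 */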
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry
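/*
 * Illustrative call site (a sketch; the exception vector code is the
 * real caller): an entry to EL3 is expected to execute
 *	bl	prepare_el3_entry
 * before dispatching to its handler, so the handler always runs with
 * the interrupted context saved and PSTATE in a known default state.
 */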
/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
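/*
 * Note: x28 is deliberately restored last above. It is first used as
 * a scratch register to transfer the saved SP_EL0 value into the
 * SP_EL0 system register, and only then reloaded (together with x29)
 * with its own saved value, avoiding the need for another temporary.
 */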
#if ERRATA_SPECULATIVE_AT
/* --------------------------------------------------------------------
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 * --------------------------------------------------------------------
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The two steps below must be performed in this order to
	 * disable page table walks for the lower ELs (EL1 and EL0).
	 * The first step disables stage 1 walks; the second forces the
	 * page table walker to honour the TCR_EL1.EPDx bits when
	 * performing address translation. The ISB ensures the CPU
	 * carries out the two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable page table walks by
	 *    stage 1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb
	ret
endfunc save_and_update_ptw_el1_sys_regs
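/*
 * The saved SCTLR_EL1/TCR_EL1 values are put back on the exit path by
 * the restore_ptw_el1_sys_regs macro invoked from el3_exit below,
 * undoing the temporary disabling of the EL1/EL0 page table walks.
 */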
#endif /* ERRATA_SPECULATIVE_AT */

/* -----------------------------------------------------------------
 * The below macro returns the address of the per_world context for
 * the security state, retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm
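/*
 * How the address is computed (a sketch of the arithmetic): the
 * security state decoded from the saved SCR_EL3 value acts as an
 * index, scaled by the size of one per-world entry
 * (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3) and added to the base
 * of the global per_world_context array, i.e.
 *	\_reg = &per_world_context + state * sizeof(entry)
 */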
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */

	/* --------------------------------------------------------------
	 * Restore MDCR_EL3, SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * --------------------------------------------------------------
	 */
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldr	x19, [sp, #CTX_EL3STATE_OFFSET + CTX_MDCR_EL3]
	msr	spsr_el3, x16
	msr	elr_el3, x17
	msr	scr_el3, x18
	msr	mdcr_el3, x19

	restore_ptw_el1_sys_regs
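	/*
	 * x30 cannot be restored inside restore_gp_pmcr_pauth_regs,
	 * since the function itself returns through x30; its header
	 * therefore requires the caller to reload it, which is done
	 * explicitly below just before the exception return.
	 */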
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit