/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>
#include <platform_def.h>

#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if CTX_INCLUDE_SVE_REGS
	.global	sve_context_save
	.global	sve_context_restore
#endif /* CTX_INCLUDE_SVE_REGS */

#if ERRATA_SPECULATIVE_AT
	.global	save_and_update_ptw_el1_sys_regs
#endif /* ERRATA_SPECULATIVE_AT */

	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

/* The following macros are used when either CTX_INCLUDE_FPREGS or CTX_INCLUDE_SVE_REGS is enabled */
#if CTX_INCLUDE_FPREGS || CTX_INCLUDE_SVE_REGS
.macro fpregs_state_save base:req hold:req
	mrs	\hold, fpsr
	str	\hold, [\base, #CTX_SIMD_FPSR]

	mrs	\hold, fpcr
	str	\hold, [\base, #CTX_SIMD_FPCR]

#if CTX_INCLUDE_AARCH32_REGS && CTX_INCLUDE_FPREGS
	mrs	\hold, fpexc32_el2
	str	\hold, [\base, #CTX_SIMD_FPEXC32]
#endif
.endm

.macro fpregs_state_restore base:req hold:req
	ldr	\hold, [\base, #CTX_SIMD_FPSR]
	msr	fpsr, \hold

	ldr	\hold, [\base, #CTX_SIMD_FPCR]
	msr	fpcr, \hold

#if CTX_INCLUDE_AARCH32_REGS && CTX_INCLUDE_FPREGS
	ldr	\hold, [\base, #CTX_SIMD_FPEXC32]
	msr	fpexc32_el2, \hold
#endif
.endm

#endif /* CTX_INCLUDE_FPREGS || CTX_INCLUDE_SVE_REGS */

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly, using only
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save the floating point register context.
 * It assumes that 'x0' is pointing to a 'fp_regs' structure where
 * the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume the bit is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0], #32
	stp	q2, q3, [x0], #32
	stp	q4, q5, [x0], #32
	stp	q6, q7, [x0], #32
	stp	q8, q9, [x0], #32
	stp	q10, q11, [x0], #32
	stp	q12, q13, [x0], #32
	stp	q14, q15, [x0], #32
	stp	q16, q17, [x0], #32
	stp	q18, q19, [x0], #32
	stp	q20, q21, [x0], #32
	stp	q22, q23, [x0], #32
	stp	q24, q25, [x0], #32
	stp	q26, q27, [x0], #32
	stp	q28, q29, [x0], #32
	stp	q30, q31, [x0], #32

	fpregs_state_save x0, x9

	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly, using only
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to restore the floating point register context. It assumes that
 * 'x0' is pointing to a 'fp_regs' structure from where the register
 * context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume the bit is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0], #32
	ldp	q2, q3, [x0], #32
	ldp	q4, q5, [x0], #32
	ldp	q6, q7, [x0], #32
	ldp	q8, q9, [x0], #32
	ldp	q10, q11, [x0], #32
	ldp	q12, q13, [x0], #32
	ldp	q14, q15, [x0], #32
	ldp	q16, q17, [x0], #32
	ldp	q18, q19, [x0], #32
	ldp	q20, q21, [x0], #32
	ldp	q22, q23, [x0], #32
	ldp	q24, q25, [x0], #32
	ldp	q26, q27, [x0], #32
	ldp	q28, q29, [x0], #32
	ldp	q30, q31, [x0], #32

	fpregs_state_restore x0, x9

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if CTX_INCLUDE_SVE_REGS
/*
 * Helper macros for SVE predicate save/restore operations.
 */
.macro sve_predicate_op op:req reg:req
	\op	p0, [\reg, #0, MUL VL]
	\op	p1, [\reg, #1, MUL VL]
	\op	p2, [\reg, #2, MUL VL]
	\op	p3, [\reg, #3, MUL VL]
	\op	p4, [\reg, #4, MUL VL]
	\op	p5, [\reg, #5, MUL VL]
	\op	p6, [\reg, #6, MUL VL]
	\op	p7, [\reg, #7, MUL VL]
	\op	p8, [\reg, #8, MUL VL]
	\op	p9, [\reg, #9, MUL VL]
	\op	p10, [\reg, #10, MUL VL]
	\op	p11, [\reg, #11, MUL VL]
	\op	p12, [\reg, #12, MUL VL]
	\op	p13, [\reg, #13, MUL VL]
	\op	p14, [\reg, #14, MUL VL]
	\op	p15, [\reg, #15, MUL VL]
.endm

.macro sve_vectors_op op:req reg:req
	\op	z0, [\reg, #0, MUL VL]
	\op	z1, [\reg, #1, MUL VL]
	\op	z2, [\reg, #2, MUL VL]
	\op	z3, [\reg, #3, MUL VL]
	\op	z4, [\reg, #4, MUL VL]
	\op	z5, [\reg, #5, MUL VL]
	\op	z6, [\reg, #6, MUL VL]
	\op	z7, [\reg, #7, MUL VL]
	\op	z8, [\reg, #8, MUL VL]
	\op	z9, [\reg, #9, MUL VL]
	\op	z10, [\reg, #10, MUL VL]
	\op	z11, [\reg, #11, MUL VL]
	\op	z12, [\reg, #12, MUL VL]
	\op	z13, [\reg, #13, MUL VL]
	\op	z14, [\reg, #14, MUL VL]
	\op	z15, [\reg, #15, MUL VL]
	\op	z16, [\reg, #16, MUL VL]
	\op	z17, [\reg, #17, MUL VL]
	\op	z18, [\reg, #18, MUL VL]
	\op	z19, [\reg, #19, MUL VL]
	\op	z20, [\reg, #20, MUL VL]
	\op	z21, [\reg, #21, MUL VL]
	\op	z22, [\reg, #22, MUL VL]
	\op	z23, [\reg, #23, MUL VL]
	\op	z24, [\reg, #24, MUL VL]
	\op	z25, [\reg, #25, MUL VL]
	\op	z26, [\reg, #26, MUL VL]
	\op	z27, [\reg, #27, MUL VL]
	\op	z28, [\reg, #28, MUL VL]
	\op	z29, [\reg, #29, MUL VL]
	\op	z30, [\reg, #30, MUL VL]
	\op	z31, [\reg, #31, MUL VL]
.endm

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly, using only
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save the SVE register context. It assumes that 'x0' is
 * pointing to a 'sve_regs_t' structure to which the register context
 * will be saved.
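 *
 * Note: ZCR_EL3.LEN is programmed to ((SVE_VECTOR_LEN >> 7) - 1),
 * i.e. (vector length in bits / 128) - 1, so the whole configured
 * vector length is covered by the save. SVE access is enabled
 * temporarily for EL3 (CPTR_EL3.EZ set, CPTR_EL3.TFP cleared) and
 * the original CPTR_EL3 and ZCR_EL3 values are restored before
 * returning.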
 * ------------------------------------------------------------------
 */
func sve_context_save
.arch_extension sve
	/* Temporarily enable SVE */
	mrs	x10, cptr_el3
	orr	x11, x10, #CPTR_EZ_BIT
	bic	x11, x11, #TFP_BIT
	msr	cptr_el3, x11
	isb

	/* zcr_el3 */
	mrs	x12, S3_6_C1_C2_0
	mov	x13, #((SVE_VECTOR_LEN >> 7) - 1)
	msr	S3_6_C1_C2_0, x13
	isb

	/* Predicate registers */
	mov	x13, #CTX_SIMD_PREDICATES
	add	x9, x0, x13
	sve_predicate_op str, x9

	/* Save FFR after predicates */
	mov	x13, #CTX_SIMD_FFR
	add	x9, x0, x13
	rdffr	p0.b
	str	p0, [x9]

	/* Save vector registers */
	mov	x13, #CTX_SIMD_VECTORS
	add	x9, x0, x13
	sve_vectors_op str, x9

	/* Restore SVE enablement */
	msr	S3_6_C1_C2_0, x12 /* zcr_el3 */
	msr	cptr_el3, x10
	isb
.arch_extension nosve

	/* Save FPSR, FPCR and FPEXC32 */
	fpregs_state_save x0, x9

	ret
endfunc sve_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly, using only
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to restore the SVE register context. It assumes that 'x0' is
 * pointing to a 'sve_regs_t' structure from where the register
 * context will be restored.
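 *
 * Note: FFR is restored before the predicate registers, since
 * wrffr takes its input from p0, which is used as a staging
 * register here; the real p0 value is only reloaded afterwards by
 * the predicate restore.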
 * ------------------------------------------------------------------
 */
func sve_context_restore
.arch_extension sve
	/* Temporarily enable SVE for EL3 */
	mrs	x10, cptr_el3
	orr	x11, x10, #CPTR_EZ_BIT
	bic	x11, x11, #TFP_BIT
	msr	cptr_el3, x11
	isb

	/* zcr_el3 */
	mrs	x12, S3_6_C1_C2_0
	mov	x13, #((SVE_VECTOR_LEN >> 7) - 1)
	msr	S3_6_C1_C2_0, x13
	isb

	/* Restore FFR register before predicates */
	mov	x13, #CTX_SIMD_FFR
	add	x9, x0, x13
	ldr	p0, [x9]
	wrffr	p0.b

	/* Restore predicate registers */
	mov	x13, #CTX_SIMD_PREDICATES
	add	x9, x0, x13
	sve_predicate_op ldr, x9

	/* Restore vector registers */
	mov	x13, #CTX_SIMD_VECTORS
	add	x9, x0, x13
	sve_vectors_op ldr, x9

	/* Restore SVE enablement */
	msr	S3_6_C1_C2_0, x12 /* zcr_el3 */
	msr	cptr_el3, x10
	isb
.arch_extension nosve

	/* Restore FPSR, FPCR and FPEXC32 */
	fpregs_state_restore x0, x9
	ret
endfunc sve_context_restore
#endif /* CTX_INCLUDE_SVE_REGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
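	 *
	 * Currently only PSTATE.DIT is handled below: when FEAT_DIT is
	 * enabled (and, for ENABLE_FEAT_DIT >= 2, detected at runtime
	 * via ID_AA64PFR0_EL1), DIT is set so that EL3 itself runs
	 * with data independent timing.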
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT >= 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state, performs an ID register
 * check to see if the platform supports the MPAM extension, and restores
 * the MPAM3 register value if it is FEAT_STATE_ENABLED/FEAT_STATE_CHECKED.
 *
 * This is more complicated than for other extensions because we cannot
 * check whether the platform supports MPAM by looking at the status of a
 * particular bit in the MDCR_EL3 or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */

	.macro restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM >= 2
	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore MPAM3_EL3 register as per context state.
	 * Currently we only enable MPAM for the NS world and trap to
	 * EL3 for MPAM accesses from lower ELs of the Secure and
	 * Realm worlds.
	 * x9 holds the address of the per_world context.
	 * -----------------------------------------------------------
	 */

	ldr	x17, [x9, #CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif
	.endm /* restore_mpam3_el3 */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	setup_el3_execution_context
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 and
 * all general purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
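 *
 * Note: SP_EL0 is restored through x28 before the final x28/x29
 * pair is reloaded, which is why the loads below do not exactly
 * mirror the save order.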
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

#if ERRATA_SPECULATIVE_AT
/* --------------------------------------------------------------------
 * In case of ERRATA_SPECULATIVE_AT, save SCTLR_EL1 and TCR_EL1
 * registers and update EL1 registers to disable stage1 and stage2
 * page table walk.
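 * clobbers: x29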
 * --------------------------------------------------------------------
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage1 page table walk and the second
	 * step ensures that the page table walker uses the
	 * TCR_EL1.EPDx bits to perform address translation. The ISB
	 * ensures that the CPU performs these two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable the page table walk
	 *    by stage1.
	 * 2. Enable the MMU bit to avoid identity mapping via stage2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb
	ret
endfunc save_and_update_ptw_el1_sys_regs

#endif /* ERRATA_SPECULATIVE_AT */

/* -----------------------------------------------------------------
 * The below macro returns the address of the per_world context for
 * the security state, retrieved through the "get_security_state"
 * macro. The per_world context address is returned in the register
 * argument.
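 * The offset from the 'per_world_context' base is computed as the
 * security state multiplied by the per-world context size,
 * (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3).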
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
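	 * (The ISB below ensures the CPTR_EL3 write has taken effect
	 * before zcr_el3 is accessed, so the access does not trap.)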
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */

	/* --------------------------------------------------------------
	 * Restore MDCR_EL3, SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * --------------------------------------------------------------
	 */
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldr	x19, [sp, #CTX_EL3STATE_OFFSET + CTX_MDCR_EL3]
	msr	spsr_el3, x16
	msr	elr_el3, x17
	msr	scr_el3, x18
	msr	mdcr_el3, x19

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
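	 * x30 is reloaded separately from CTX_GPREG_LR, since
	 * restore_gp_pmcr_pauth_regs deliberately leaves it untouched.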
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit