/*
 * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_registers
	.global	restore_gp_registers_eret
	.global	restore_gp_registers_callee_eret
	.global	el3_exit
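
/* -----------------------------------------------------
 * Note: the CTX_* offsets used throughout this file are
 * defined in context.h relative to the base of the
 * relevant context structure, and registers are laid
 * out in adjacent 64-bit slots so that a single
 * 'stp'/'ldp' can transfer two of them at once. For
 * example (assuming the context.h layout), the store
 *
 *	stp	x9, x10, [x0, #CTX_SPSR_EL1]
 *
 * places SPSR_EL1 at x0 + CTX_SPSR_EL1 and ELR_EL1 in
 * the following doubleword at x0 + CTX_SPSR_EL1 + 8.
 * -----------------------------------------------------
 */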

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL1 system register context. It assumes that
 * 'x0' is pointing to an 'el1_sys_regs' structure where
 * the register context will be saved.
 * -----------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]

	mrs	x17, fpexc32_el2
	str	x17, [x0, #CTX_FP_FPEXC32_EL2]
#endif

	/* Save NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	ret
endfunc el1_sysregs_context_save
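
/* -----------------------------------------------------
 * Illustrative C-level usage (a sketch, not a
 * definition from this file): the context management
 * code is the expected caller of the function above,
 * passing the 'el1_sys_regs' member of the current CPU
 * context in x0, along the lines of:
 *
 *	el1_sysregs_context_save(get_sysregs_ctx(ctx));
 *
 * where 'ctx' is a cpu_context pointer and
 * 'get_sysregs_ctx' is the accessor assumed to be
 * provided by context.h.
 * -----------------------------------------------------
 */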

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL1 system register context. It assumes
 * that 'x0' is pointing to an 'el1_sys_regs' structure
 * from where the register context will be restored.
 * -----------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16

	ldr	x17, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x17
#endif

	/* Restore NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
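
/* -----------------------------------------------------
 * Sketch of the assumed world-switch ordering (for
 * illustration only): on a switch between security
 * states, the outgoing world's EL1 state is saved with
 * the save/restore pair above before the incoming
 * world's state is installed, i.e.
 *
 *	el1_sysregs_context_save(<outgoing context>)
 *	el1_sysregs_context_restore(<incoming context>)
 *
 * with the final ERET performed by el3_exit below.
 * -----------------------------------------------------
 */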

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to save the floating point register context. It
 * assumes that 'x0' is pointing to a 'fp_regs'
 * structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is
 * set. However, we currently neither use VFP registers
 * nor set traps in Trusted Firmware, and assume the bit
 * is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * -----------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

	ret
endfunc fpregs_context_save
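
/* -----------------------------------------------------
 * Note: each Qn register is 128 bits wide, so every
 * 'stp qA, qB' above writes 32 bytes of context; this
 * is why the CTX_FP_Qn offset names advance two
 * registers at a time (CTX_FP_Q0, CTX_FP_Q2, ...).
 * -----------------------------------------------------
 */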

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to restore the floating point register context. It
 * assumes that 'x0' is pointing to a 'fp_regs'
 * structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is
 * set. However, we currently neither use VFP registers
 * nor set traps in Trusted Firmware, and assume the bit
 * is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * -----------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
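
/* -----------------------------------------------------
 * The FP save/restore pair above is only built when the
 * CTX_INCLUDE_FPREGS build option is enabled, e.g. with
 * an invocation along the lines of (illustrative):
 *
 *	make PLAT=fvp CTX_INCLUDE_FPREGS=1 all
 * -----------------------------------------------------
 */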

/* -----------------------------------------------------
 * The following functions are used to save and restore
 * all the general purpose registers. Ideally we would
 * only save and restore the callee-saved registers when
 * a world switch occurs, but that type of implementation
 * is more complex. So currently we will always save and
 * restore these registers on entry to and exit from EL3.
 * These are not macros to ensure their invocation fits
 * within the 32 instructions per exception vector.
 * clobbers: x18
 * -----------------------------------------------------
 */
func save_gp_registers
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	ret
endfunc save_gp_registers

func restore_gp_registers_eret
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	b	restore_gp_registers_callee_eret
endfunc restore_gp_registers_eret

func restore_gp_registers_callee_eret
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ldp	x30, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	msr	sp_el0, x17
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	eret
endfunc restore_gp_registers_callee_eret
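
/* -----------------------------------------------------
 * Note on the restore ordering above: x30 and the saved
 * SP_EL0 value sit in adjacent context slots, so the
 * single 'ldp x30, x17' from CTX_GPREG_LR recovers both
 * at once; x16 and x17 are then reloaded last because
 * x17 has just been used as scratch for the SP_EL0
 * write.
 * -----------------------------------------------------
 */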

/* -----------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a
 * valid context structure from where the gp regs and
 * other special registers can be retrieved.
 * -----------------------------------------------------
 */
func el3_exit
	/* -----------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack,
	 * which will be used for handling the next SMC. Then
	 * switch to SP_EL3.
	 * -----------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #1
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* -----------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * -----------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	/* Restore saved general purpose registers and return */
	b	restore_gp_registers_eret
endfunc el3_exit
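
/* -----------------------------------------------------
 * Note: el3_exit is the common exit path out of EL3.
 * Runtime service handlers are assumed to branch here
 * once their return values have been written into the
 * gp register area of the context that SP_EL3 points
 * to.
 * -----------------------------------------------------
 */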