/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to a
 * 'el1_sys_regs' structure where the register context will be saved.
 *
 * In:      x0 = base of the 'el1_sys_regs' context structure
 * Out:     none (context memory at [x0, #CTX_*] is written)
 * Clobber: x9 - x17
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context.
 * It assumes that 'x0' is pointing to a
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 *
 * In:      x0 = base of the 'el1_sys_regs' context structure
 * Out:     EL1 system registers are written from [x0, #CTX_*]
 * Clobber: x9 - x17
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 *
 * In:      x0 = base of the 'fp_regs' context structure
 * Clobber: x9 - x11
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 *
 * In:      x0 = base of the 'fp_regs' context structure
 * Clobber: x9 - x11, q0 - q31, fpsr, fpcr
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter is not disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented, and if called from
 * Non-secure state saves PMCR_EL0 and disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 *
 * Assumes:  SP_EL3 points to a valid 'cpu_context' structure.
 * clobbers: x18 (and x9, x10 as scratch; x19-x29 are saved to the
 *           context before being reused when PAuth is enabled)
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed (the bit reads back as 0), meaning that ARMv8.5-PMU
	 * is not implemented and PMCR_EL0 should be saved in the
	 * non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f			/* SCCD set: nothing more to do */

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f			/* Secure caller: skip PMCR save */

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 *
 * Assumes: SP_EL3 points to a valid 'cpu_context' structure.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f			/* returning to Secure: skip */

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed (the bit reads back as 0), meaning that ARMv8.5-PMU
	 * is not implemented and PMCR_EL0 should be restored from the
	 * non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f			/* SCCD set: PMCR was not saved */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	/*
	 * x28 is used as scratch to restore SP_EL0 first, then
	 * reloaded with its own context value below.
	 */
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	eret

endfunc el3_exit