/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_FEAT_ECV
	.global	el2_sysregs_context_save_ecv
	.global	el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
	.global	el2_sysregs_context_save_vhe
	.global	el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
	.global	el2_sysregs_context_save_nv2
	.global	el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_FEAT_CSV2_2
	.global	el2_sysregs_context_save_csv2
	.global	el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save/restore EL2 system register context.
 * The el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The rest of the functions save and
 * restore EL2 system registers that are present when a
 * particular feature is enabled. All functions assume
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * where the register context will be saved/restored.
 *
 * The following registers are not added:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_FEAT_ECV
func el2_sysregs_context_save_ecv
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
	ret
endfunc el2_sysregs_context_save_ecv

func el2_sysregs_context_restore_ecv
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
	ret
endfunc el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
func el2_sysregs_context_save_vhe
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	ret
endfunc el2_sysregs_context_save_vhe

func el2_sysregs_context_restore_vhe
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
	ret
endfunc el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
	ret
endfunc el2_sysregs_context_save_nv2

func el2_sysregs_context_restore_nv2
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
	ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_FEAT_CSV2_2
func el2_sysregs_context_save_csv2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
	ret
endfunc el2_sysregs_context_save_csv2

func el2_sysregs_context_restore_csv2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
	ret
endfunc el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
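 *
 * Note: when ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
 * are skipped here; they are saved separately by
 * save_and_update_ptw_el1_sys_regs.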
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
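 *
 * Note: when ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
 * are not restored here; they are restored on the exit path by the
 * restore_ptw_el1_sys_regs macro (see el3_exit).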
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save the floating point register context. It assumes that 'x0'
 * is pointing to a 'fp_regs' structure where the register context
 * will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume it is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore the floating point register context. It assumes that 'x0'
 * is pointing to a 'fp_regs' structure from where the register
 * context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume it is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU); if so, PMCCNTR_EL0 need not
 * be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 has failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set: Secure Cycle Counter Disable. Prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If these bits are not set, FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 should be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 and
	 * in Secure state. Hence, PMCR_EL0 needs to be saved before
	 * the world switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
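 * (el3_exit reloads x30 from the CTX_GPREG_LR slot of the context
 * right after calling this function.)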
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 should be restored from the
	 * non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walk.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The following steps must be performed in this order to
	 * disable the page table walk for lower ELs (EL1 and EL0).
	 * The first step ensures that the stage 1 page table walk is
	 * disabled, and the second step ensures that the page table
	 * walker uses the TCR_EL1.EPDx bits to perform address
	 * translation. The ISB ensures that the CPU performs these
	 * two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1
	 *    page table walk.
	 * 2. Enable the MMU bit to avoid identity mapping via
	 *    stage 2 and force the page table walker to use the
	 *    TCR_EL1.EPDx bits.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20	/* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue an Error Synchronization Barrier to synchronize
	 * SErrors before exiting EL3. We're running with EAs
	 * unmasked, so any synchronized errors would be taken
	 * immediately; therefore there is no need to inspect the
	 * DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
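	/*
	 * Otherwise, issue a data synchronization barrier so that all
	 * outstanding memory accesses complete before exiting EL3.
	 */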
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit