/*
 * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert_macros.S>
#include <asm_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <lib/extensions/ras_arch.h>
#include <cpu_macros.S>

	.globl	handle_lower_el_sync_ea
	.globl	handle_lower_el_async_ea
	.globl	handle_pending_async_ea
/*
 * This function handles Synchronous External Aborts from a lower EL.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_sync_ea
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/* EAs other than the above are unhandled exceptions */
	no_ret	report_unhandled_exception
1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_sync_ea


/*
 * This function handles SErrors from lower ELs.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_async_ea

	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_async_ea

/*
 * Handler for async EA from a lower EL, synchronized at EL3 entry in FFH mode.
 *
 * This scenario may arise when there is an error (EA) in the system which is
 * not yet signaled to the PE while executing in a lower EL. During entry into
 * EL3, the errors are synchronized either implicitly or explicitly, causing
 * the async EA to pend at EL3.
 *
 * On detecting the pending EA (via ISR_EL1.A), if the EA routing model is
 * Firmware First handling (FFH, SCR_EL3.EA = 1), this handler first handles
 * the pending EA and then handles the original exception.
 *
 * This function assumes x30 has been saved.
 */
func handle_pending_async_ea
	/*
	 * Prepare for nested handling of EA. Stash sysregs clobbered by the
	 * nested exception and handler.
	 */
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	mrs	x30, esr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	mrs	x30, spsr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	mrs	x30, elr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]

	mov	x30, #1
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	/*
	 * Restore the original x30 saved as part of entering EL3. This is not
	 * required for the current function, but for the EL3 SError vector
	 * entry once the PSTATE.A bit is unmasked: we restore x30 here, and
	 * the same value is then stored by the EL3 SError vector entry.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * After clearing the PSTATE.A bit, the pending SError will trigger at
	 * the current EL. An explicit synchronization event ensures the newly
	 * unmasked interrupt is taken immediately.
	 */
	unmask_async_ea

	/* Restore the original exception information and zero the storage */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	msr	elr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	msr	spsr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	msr	esr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]

	/*
	 * If the original exception corresponds to an SError from a lower EL,
	 * eret back to the lower EL; otherwise return to the vector table for
	 * the original exception handling.
	 */
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_SERROR
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	b.eq	1f
	ret
1:
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	str	xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	exception_return
endfunc handle_pending_async_ea
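
/*
 * Note: the unmask_async_ea macro used above is not defined in this file; it
 * is assumed to be provided by the shared asm macros. Based on the comments
 * around its use (clear the PSTATE.A mask and synchronize so the pending
 * SError is taken immediately), a minimal sketch of such a sequence would be:
 *
 *	msr	daifclr, #DAIF_ABT_BIT
 *	isb
 */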

/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check for the Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check fault status code */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_sync_ea


/*
 * Prelude for Asynchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check the Exception Class to ensure this is an SError, as this
	 * function should only be invoked for SErrors. If that is not the
	 * case, which implies either a hardware error or a programming error,
	 * panic.
	 */
	ubfx	x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x2, #EC_SERROR
	b.ne	el3_panic
	/*
	 * Check for an Implementation Defined Syndrome. If so, skip checking
	 * the Uncontainable error type from the syndrome as the format is
	 * unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/* AET only valid when DFSC is 0x11 */
	ubfx	x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x2, #DFSC_SERROR
	b.ne	1f

	/*
	 * Check for the Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x3, #ERROR_STATUS_UET_UC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea

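/*
 * Note on the platform hook invoked by ea_proceed() below: the x0-x4
 * arguments it sets up correspond to the parameters of the platform External
 * Abort handler. For reference, its C prototype is expected to be of the
 * form:
 *
 *	void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
 *			     void *cookie, void *handle, uint64_t flags);
 */
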
/*
 * Delegate External Abort handling to the platform's EA handler. This
 * function assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If this is a double fault, invoke the platform handler. A double
	 * fault arises when the platform is already handling a fault from a
	 * lower EL in plat_ea_handler() and another fault occurs, which traps
	 * into EL3 because FFH_SUPPORT is enabled for the platform.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_DOUBLE_FAULT_ESR]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state as handling might involve lower ELs */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR_EL3 in CTX_DOUBLE_FAULT_ESR, so that if another fault
	 * happens in a lower EL we catch it as a double fault in the next
	 * invocation of ea_proceed(), while also preserving the original
	 * ESR_EL3.
	 */
	mrs	x5, esr_el3
	str	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_DOUBLE_FAULT_ESR]

	/*
	 * Setup the rest of the arguments, and call the platform External
	 * Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place).
	 * x2: Cookie (unused for now).
	 * x3: Context pointer.
	 * x4: Flags (security state from SCR for now).
	 */
	mov	x2, xzr
	mov	x3, sp
	mrs	x4, scr_el3
	ubfx	x4, x4, #0, #1

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning
	 * from the platform error handler, validate that we have completely
	 * unwound the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Clear Double Fault storage */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_DOUBLE_FAULT_ESR]

	ret	x29
endfunc ea_proceed