/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * Includes deduplicated and sorted alphabetically: assert_macros.S and
 * context.h were previously pulled in twice.
 */
#include <asm_macros.S>
#include <assert_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <cpu_macros.S>
#include <lib/extensions/ras_arch.h>

	.globl	handle_lower_el_ea_esb
	.globl	handle_lower_el_sync_ea
	.globl	handle_lower_el_async_ea


/*
 * Function to delegate External Aborts synchronized by ESB instruction at EL3
 * vector entry. This function assumes GP registers x0-x29 have been saved, and
 * are available for use. It delegates the handling of the EA to platform
 * handler, and returns only upon successfully handling the EA; otherwise
 * panics. On return from this function, the original exception handler is
 * expected to resume.
 */
func handle_lower_el_ea_esb
	/* x0: EA reason, x1: EA syndrome as reported by ESB in DISR_EL1 */
	mov	x0, #ERROR_EA_ESB
	mrs	x1, DISR_EL1
	b	ea_proceed
endfunc handle_lower_el_ea_esb


/*
 * This function forms the tail end of Synchronous Exception entry from lower
 * EL, and expects to handle Synchronous External Aborts from lower EL and CPU
 * Implementation Defined Exceptions. If any other kind of exception is detected,
 * then this function reports unhandled exception.
 *
 * It delegates the handling of the EA to platform handler, and upon successfully
 * handling the EA, exits EL3; otherwise panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_sync_ea
	/*
	 * x30 was saved by the caller, so it is free here as a scratch
	 * register to hold the Exception Class extracted from ESR_EL3.
	 */
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/*
	 * Not an External Abort: try the CPU-specific Implementation Defined
	 * exception handler. Save the GP registers this path clobbers.
	 */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Get the cpu_ops pointer */
	bl	get_cpu_ops_ptr

	/* Get the cpu_ops exception handler */
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/*
	 * If the reserved function pointer is NULL, this CPU does not have an
	 * implementation defined exception handler function
	 */
	cbz	x0, 2f

	/* Call the handler with x1 = Exception Class */
	mrs	x1, esr_el3
	ubfx	x1, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	blr	x0
	b	2f

	/* 1: External Abort (instruction or data) from lower EL */
1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit

	/* 2: no (or unsuccessful) IMPDEF handling — restore and report */
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Synchronous exceptions other than the above are assumed to be EA */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception
endfunc handle_lower_el_sync_ea


/*
 * This function handles SErrors from lower ELs.
 *
 * It delegates the handling of the EA to platform handler, and upon successfully
 * handling the EA, exits EL3; otherwise panics.
 *
 * This function assumes x30 has been saved.
119 */ 120func handle_lower_el_async_ea 121 122 /* 123 * Save general purpose and ARMv8.3-PAuth registers (if enabled). 124 * Also save PMCR_EL0 and set the PSTATE to a known state. 125 */ 126 bl prepare_el3_entry 127 128#if ENABLE_PAUTH 129 /* Load and program APIAKey firmware key */ 130 bl pauth_load_bl31_apiakey 131#endif 132 133 /* Setup exception class and syndrome arguments for platform handler */ 134 mov x0, #ERROR_EA_ASYNC 135 mrs x1, esr_el3 136 bl delegate_async_ea 137 138 /* el3_exit assumes SP_EL0 on entry */ 139 msr spsel, #MODE_SP_EL0 140 b el3_exit 141endfunc handle_lower_el_async_ea 142 143 144/* 145 * Prelude for Synchronous External Abort handling. This function assumes that 146 * all GP registers have been saved by the caller. 147 * 148 * x0: EA reason 149 * x1: EA syndrome 150 */ 151func delegate_sync_ea 152#if RAS_FFH_SUPPORT 153 /* 154 * Check for Uncontainable error type. If so, route to the platform 155 * fatal error handler rather than the generic EA one. 156 */ 157 ubfx x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH 158 cmp x2, #ERROR_STATUS_SET_UC 159 b.ne 1f 160 161 /* Check fault status code */ 162 ubfx x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH 163 cmp x3, #SYNC_EA_FSC 164 b.ne 1f 165 166 no_ret plat_handle_uncontainable_ea 1671: 168#endif 169 170 b ea_proceed 171endfunc delegate_sync_ea 172 173 174/* 175 * Prelude for Asynchronous External Abort handling. This function assumes that 176 * all GP registers have been saved by the caller. 177 * 178 * x0: EA reason 179 * x1: EA syndrome 180 */ 181func delegate_async_ea 182#if RAS_FFH_SUPPORT 183 /* Check Exception Class to ensure SError, as this function should 184 * only be invoked for SError. If that is not the case, which implies 185 * either an HW error or programming error, panic. 186 */ 187 ubfx x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH 188 cmp x2, EC_SERROR 189 b.ne el3_panic 190 /* 191 * Check for Implementation Defined Syndrome. 
If so, skip checking 192 * Uncontainable error type from the syndrome as the format is unknown. 193 */ 194 tbnz x1, #SERROR_IDS_BIT, 1f 195 196 /* AET only valid when DFSC is 0x11 */ 197 ubfx x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH 198 cmp x2, #DFSC_SERROR 199 b.ne 1f 200 201 /* 202 * Check for Uncontainable error type. If so, route to the platform 203 * fatal error handler rather than the generic EA one. 204 */ 205 ubfx x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH 206 cmp x3, #ERROR_STATUS_UET_UC 207 b.ne 1f 208 209 no_ret plat_handle_uncontainable_ea 2101: 211#endif 212 213 b ea_proceed 214endfunc delegate_async_ea 215 216 217/* 218 * Delegate External Abort handling to platform's EA handler. This function 219 * assumes that all GP registers have been saved by the caller. 220 * 221 * x0: EA reason 222 * x1: EA syndrome 223 */ 224func ea_proceed 225 /* 226 * If the ESR loaded earlier is not zero, we were processing an EA 227 * already, and this is a double fault. 228 */ 229 ldr x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3] 230 cbz x5, 1f 231 no_ret plat_handle_double_fault 232 2331: 234 /* Save EL3 state */ 235 mrs x2, spsr_el3 236 mrs x3, elr_el3 237 stp x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3] 238 239 /* 240 * Save ESR as handling might involve lower ELs, and returning back to 241 * EL3 from there would trample the original ESR. 242 */ 243 mrs x4, scr_el3 244 mrs x5, esr_el3 245 stp x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3] 246 247 /* 248 * Setup rest of arguments, and call platform External Abort handler. 249 * 250 * x0: EA reason (already in place) 251 * x1: Exception syndrome (already in place). 252 * x2: Cookie (unused for now). 253 * x3: Context pointer. 254 * x4: Flags (security state from SCR for now). 
255 */ 256 mov x2, xzr 257 mov x3, sp 258 ubfx x4, x4, #0, #1 259 260 /* Switch to runtime stack */ 261 ldr x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP] 262 msr spsel, #MODE_SP_EL0 263 mov sp, x5 264 265 mov x29, x30 266#if ENABLE_ASSERTIONS 267 /* Stash the stack pointer */ 268 mov x28, sp 269#endif 270 bl plat_ea_handler 271 272#if ENABLE_ASSERTIONS 273 /* 274 * Error handling flows might involve long jumps; so upon returning from 275 * the platform error handler, validate that the we've completely 276 * unwound the stack. 277 */ 278 mov x27, sp 279 cmp x28, x27 280 ASM_ASSERT(eq) 281#endif 282 283 /* Make SP point to context */ 284 msr spsel, #MODE_SP_ELX 285 286 /* Restore EL3 state and ESR */ 287 ldp x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3] 288 msr spsr_el3, x1 289 msr elr_el3, x2 290 291 /* Restore ESR_EL3 and SCR_EL3 */ 292 ldp x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3] 293 msr scr_el3, x3 294 msr esr_el3, x4 295 296#if ENABLE_ASSERTIONS 297 cmp x4, xzr 298 ASM_ASSERT(ne) 299#endif 300 301 /* Clear ESR storage */ 302 str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3] 303 304 ret x29 305endfunc ea_proceed 306