/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <assert_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <cpu_macros.S>
#include <lib/extensions/ras_arch.h>

	.globl	handle_lower_el_ea_esb
	.globl	handle_lower_el_async_ea
	.globl	enter_lower_el_sync_ea
	.globl	enter_lower_el_async_ea

/*
 * Function to delegate External Aborts synchronized by ESB instruction at EL3
 * vector entry. This function assumes GP registers x0-x29 have been saved, and
 * are available for use. It delegates the handling of the EA to platform
 * handler, and returns only upon successfully handling the EA; otherwise
 * panics. On return from this function, the original exception handler is
 * expected to resume.
 */
func handle_lower_el_ea_esb
	mov	x0, #ERROR_EA_ESB	/* EA reason for the platform handler */
	mrs	x1, DISR_EL1		/* Syndrome recorded by the ESB instruction */
	b	ea_proceed
endfunc handle_lower_el_ea_esb

/*
 * This function forms the tail end of Synchronous Exception entry from lower
 * EL, and expects to handle Synchronous External Aborts from lower EL and CPU
 * Implementation Defined Exceptions. If any other kind of exception is
 * detected, then this function reports unhandled exception.
 *
 * Since it's part of exception vector, this function doesn't expect any GP
 * registers to have been saved. It delegates the handling of the EA to platform
 * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
 */
func enter_lower_el_sync_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/* Extract the Exception Class from the syndrome register */
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/*
	 * Not an External Abort: try the CPU-specific (Implementation Defined)
	 * exception handler. Save the GP registers the call below may clobber.
	 */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Get the cpu_ops pointer */
	bl	get_cpu_ops_ptr

	/* Get the cpu_ops exception handler */
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/*
	 * If the reserved function pointer is NULL, this CPU does not have an
	 * implementation defined exception handler function.
	 */
	cbz	x0, 2f

	/* Invoke the handler with the Exception Class as its argument */
	mrs	x1, esr_el3
	ubfx	x1, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	blr	x0
	b	2f

1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable Cycle Counter.
	 * Also set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit

2:
	/* Restore the GP registers saved before the cpu_ops call */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Synchronous exceptions other than the above are assumed to be EA */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception
endfunc enter_lower_el_sync_ea
/*
 * This function handles SErrors from lower ELs.
 *
 * Since it's part of exception vector, this function doesn't expect any GP
 * registers to have been saved. It delegates the handling of the EA to platform
 * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
 */
func enter_lower_el_async_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

/* x30 already stashed by the caller when entering through this label */
handle_lower_el_async_ea:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable Cycle Counter.
	 * Also set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc enter_lower_el_async_ea

/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if RAS_EXTENSION
	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check fault status code */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_sync_ea

/*
 * Prelude for Asynchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if RAS_EXTENSION
	/*
	 * Check Exception Class to ensure SError, as this function should
	 * only be invoked for SError. If that is not the case, which implies
	 * either an HW error or programming error, panic.
	 */
	ubfx	x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x2, #EC_SERROR
	b.ne	el3_panic

	/*
	 * Check for Implementation Defined Syndrome. If so, skip checking
	 * Uncontainable error type from the syndrome as the format is unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/* AET only valid when DFSC is 0x11 */
	ubfx	x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x2, #DFSC_SERROR
	b.ne	1f

	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x3, #ERROR_STATUS_UET_UC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea
/*
 * Delegate External Abort handling to platform's EA handler. This function
 * assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If the ESR stashed in the context is not zero, we were already
	 * processing an EA, and this is a double fault.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR as handling might involve lower ELs, and returning back to
	 * EL3 from there would trample the original ESR.
	 */
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Setup rest of arguments, and call platform External Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place).
	 * x2: Cookie (unused for now).
	 * x3: Context pointer.
	 * x4: Flags (security state from SCR for now).
	 */
	mov	x2, xzr
	mov	x3, sp
	ubfx	x4, x4, #0, #1		/* Keep only SCR_EL3.NS as the flag */

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

	mov	x29, x30		/* Preserve return address across the call */
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning
	 * from the platform error handler, validate that we've completely
	 * unwound the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Restore EL3 state */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore ESR_EL3 and SCR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

#if ENABLE_ASSERTIONS
	/* The saved ESR must be non-zero for the double-fault check to work */
	cmp	x4, xzr
	ASM_ASSERT(ne)
#endif

	/* Clear ESR storage so a future EA is not treated as a double fault */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]

	ret	x29
endfunc ea_proceed