/*
 * Copyright (c) 2018-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */


#include <asm_macros.S>
#include <assert_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <lib/extensions/ras_arch.h>


	.globl	handle_lower_el_ea_esb
	.globl	enter_lower_el_sync_ea
	.globl	enter_lower_el_async_ea


/*
 * Function to delegate External Aborts synchronized by the ESB instruction at
 * EL3 vector entry. This function assumes GP registers x0-x29 have been saved,
 * and are available for use. It delegates the handling of the EA to the
 * platform handler, and returns only upon successfully handling the EA;
 * otherwise it panics. On return from this function, the original exception
 * handler is expected to resume.
 */
func handle_lower_el_ea_esb
	mov	x0, #ERROR_EA_ESB
	mrs	x1, DISR_EL1
	b	ea_proceed
endfunc handle_lower_el_ea_esb
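

/*
 * For reference: handle_lower_el_ea_esb is reached from the EL3 vector code
 * after an ESB instruction has synchronized and deferred a pending External
 * Abort, at which point DISR_EL1 holds the deferred error syndrome. A minimal
 * sketch of such a call site is shown below; DISR_A_BIT and the local label
 * are used for illustration only and are not defined in this file:
 *
 *	esb				// synchronize and defer pending EA
 *	mrs	x30, DISR_EL1		// x30 assumed already saved
 *	tbz	x30, #DISR_A_BIT, 1f	// nothing was deferred
 *	bl	save_gp_registers	// this handler expects x0-x29 saved
 *	bl	handle_lower_el_ea_esb
 * 1:
 */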


/*
 * This function forms the tail end of Synchronous Exception entry from a lower
 * EL, and expects to handle only Synchronous External Aborts from a lower EL.
 * If any other kind of exception is detected, this function reports an
 * unhandled exception.
 *
 * Since it's part of the exception vector, this function doesn't expect any GP
 * registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_sync_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.ne	2f

1:
	/* Test for the EA bit in the instruction syndrome */
	mrs	x30, esr_el3
	tbz	x30, #ESR_ISS_EABORT_EA_BIT, 2f

	/* Save GP registers */
	bl	save_gp_registers

	/*
	 * If the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable all event
	 * counters and the cycle counter.
	 */
	bl	save_pmcr_disable_pmu

	/* Save ARMv8.3-PAuth registers and load firmware key */
#if CTX_INCLUDE_PAUTH_REGS
	bl	pauth_context_save
#endif
#if ENABLE_PAUTH
	bl	pauth_load_bl_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	adr	x30, el3_exit
	b	delegate_sync_ea

2:
	/*
	 * Synchronous exceptions other than I/D aborts with the EA bit set
	 * cannot be handled here; report them as unhandled.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception
endfunc enter_lower_el_sync_ea
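

/*
 * Note on control flow, common to both EA entry points above and below: x30
 * is pointed at el3_exit before tail-calling the delegate routines.
 * ea_proceed preserves that value in x29 across the call to the platform
 * handler and finishes with 'ret x29', so a successfully handled EA resumes
 * the lower EL through el3_exit rather than returning into the exception
 * vector.
 */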


/*
 * This function handles SErrors from lower ELs.
 *
 * Since it's part of the exception vector, this function doesn't expect any GP
 * registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_async_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/* Save GP registers */
	bl	save_gp_registers

	/*
	 * If the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable all event
	 * counters and the cycle counter.
	 */
	bl	save_pmcr_disable_pmu

	/* Save ARMv8.3-PAuth registers and load firmware key */
#if CTX_INCLUDE_PAUTH_REGS
	bl	pauth_context_save
#endif
#if ENABLE_PAUTH
	bl	pauth_load_bl_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	adr	x30, el3_exit
	b	delegate_async_ea
endfunc enter_lower_el_async_ea


/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if RAS_EXTENSION
	/*
	 * Check for the Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check fault status code */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_sync_ea
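

/*
 * Note on the RAS_EXTENSION checks in the preludes above and below: for
 * synchronous aborts, the Synchronous Error Type (SET) field of the syndrome
 * is meaningful only when the fault status code indicates a synchronous
 * External Abort; for SErrors, the Asynchronous Error Type (AET) field is
 * meaningful only when the syndrome is not implementation defined (IDS clear)
 * and the DFSC indicates an SError. Each prelude therefore qualifies its
 * "Uncontainable" classification with the corresponding DFSC/IDS checks
 * before diverting to plat_handle_uncontainable_ea.
 */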


/*
 * Prelude for Asynchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if RAS_EXTENSION
	/*
	 * Check for an Implementation Defined Syndrome. If so, skip checking
	 * for the Uncontainable error type in the syndrome, as its format is
	 * unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/*
	 * Check for the Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x2, #ERROR_STATUS_UET_UC
	b.ne	1f

	/* Check DFSC for SError type */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #DFSC_SERROR
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea
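

/*
 * For reference: ea_proceed below hands the abort to the platform through a C
 * handler whose arguments correspond to x0-x4 as set up in that function. Its
 * prototype is expected to take the form sketched here (the platform porting
 * interface holds the authoritative declaration):
 *
 *	void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
 *			void *cookie, void *handle, uint64_t flags);
 *
 * 'handle' is the pointer to the saved CPU context (x3 = SP), and 'flags'
 * currently carries only the security state taken from SCR_EL3.NS.
 */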


/*
 * Delegate External Abort handling to the platform's EA handler. This function
 * assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If the ESR stored in the context is not zero, we were already
	 * processing an EA, and this is a double fault.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR as handling might involve lower ELs, and returning to EL3
	 * from there would trample the original ESR.
	 */
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Set up the rest of the arguments, and call the platform External
	 * Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place)
	 * x2: Cookie (unused for now)
	 * x3: Context pointer
	 * x4: Flags (security state from SCR for now)
	 */
	mov	x2, xzr
	mov	x3, sp
	ubfx	x4, x4, #0, #1

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #0
	mov	sp, x5

	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning
	 * from the platform error handler, validate that we've completely
	 * unwound the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #1

	/* Restore EL3 state */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore SCR_EL3 and ESR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

#if ENABLE_ASSERTIONS
	cmp	x4, xzr
	ASM_ASSERT(ne)
#endif

	/* Clear ESR storage, marking EA handling as complete */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]

	ret	x29
endfunc ea_proceed
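

/*
 * A minimal, illustrative sketch of a platform implementation of the EA
 * handler delegated to above, assuming the prototype documented before
 * ea_proceed. This is not the definitive default handler; platforms normally
 * take it from, or override it in, the common platform code:
 *
 *	void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
 *			void *cookie, void *handle, uint64_t flags)
 *	{
 *		// Report the abort and give up: an unhandled EA is fatal.
 *		ERROR("External Abort: reason=%u syndrome=0x%llx\n",
 *				ea_reason, (unsigned long long)syndrome);
 *		panic();
 *	}
 */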