/*
 * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <cpu_macros.S>
#include <context.h>

	.globl	handle_pending_async_ea

/*
 * Handler for an async EA from a lower EL, synchronized at EL3 entry in FFH
 * mode.
 *
 * This scenario may arise when there is an error (EA) in the system which has
 * not yet been signalled to the PE while executing in a lower EL. During entry
 * into EL3, the errors are synchronized either implicitly or explicitly,
 * causing an async EA to pend at EL3.
 *
 * On detecting the pending EA (via ISR_EL1.A), and if the EA routing model is
 * Firmware First Handling (FFH, SCR_EL3.EA = 1), this handler first handles
 * the pending EA and then handles the original exception.
 *
 * This function assumes x30 has been saved.
 */
func handle_pending_async_ea
	/*
	 * Prepare for nested handling of the EA. Stash the sysregs that will
	 * be clobbered by the nested exception and its handler.
	 */
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	mrs	x30, esr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	mrs	x30, spsr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	mrs	x30, elr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]

	/* Record in the context that a nested EA is being handled */
	mov	x30, #1
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	/*
	 * Restore the original x30 saved as part of entering EL3. This is not
	 * required by the current function itself, but the EL3 SError vector
	 * entry taken once the PSTATE.A bit is unmasked will save x30 again,
	 * so the original value must be back in place beforehand.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * After clearing the PSTATE.A bit, the pending SError will be taken at
	 * the current EL. An explicit synchronization event ensures the newly
	 * unmasked exception is taken immediately.
	 */
	unmask_async_ea

	/* Restore the original exception information and zero the stash slots */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	msr	elr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	msr	spsr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	msr	esr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]

	/*
	 * If the original exception corresponds to an SError from a lower EL,
	 * eret back to the lower EL; otherwise, return to the vector table for
	 * handling of the original exception.
	 */
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_SERROR
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	b.eq	1f
	ret
1:
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	str	xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	exception_return
endfunc handle_pending_async_ea
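
/*
 * For reference, a minimal sketch of what the unmask_async_ea macro invoked
 * above is assumed to expand to. The real definition comes from the included
 * asm_macros.S and may differ; the _sketch suffix avoids clashing with it,
 * and since the macro is never invoked here it emits no code.
 */
	.macro unmask_async_ea_sketch
	msr	daifclr, #DAIF_ABT_BIT	/* Clear PSTATE.A to unmask async EAs */
	isb				/* Synchronization event so the pending
					 * SError is taken immediately */
	.endm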
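
/*
 * For context, a hedged sketch of how the EL3 vector code is assumed to reach
 * this handler; the actual caller lives in the EL3 vector source and may
 * differ. It shows how the "x30 has been saved" assumption and the
 * ISR_EL1.A / SCR_EL3.EA checks from the header comment fit together:
 *
 *	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]	// save x30
 *	mrs	x30, isr_el1
 *	tbz	x30, #ISR_A_SHIFT, 1f		// no async EA pending
 *	mrs	x30, scr_el3
 *	tst	x30, #SCR_EA_BIT		// FFH routing (SCR_EL3.EA = 1)?
 *	b.eq	1f
 *	bl	handle_pending_async_ea
 * 1:
 */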