xref: /rk3399_ARM-atf/bl31/bl31_traps.c (revision c282384dbb45b6185b4aba14efebbad110d18e49)
1ccd81f1eSAndre Przywara /*
23c789bfcSManish Pandey  * Copyright (c) 2022-2024, Arm Limited. All rights reserved.
30ed3be6fSVarun Wadekar  * Copyright (c) 2023, NVIDIA Corporation. All rights reserved.
4ccd81f1eSAndre Przywara  *
5ccd81f1eSAndre Przywara  * SPDX-License-Identifier: BSD-3-Clause
6ccd81f1eSAndre Przywara  *
7ccd81f1eSAndre Przywara  * Dispatch synchronous system register traps from lower ELs.
8ccd81f1eSAndre Przywara  */
9ccd81f1eSAndre Przywara 
103c789bfcSManish Pandey #include <arch_features.h>
113c789bfcSManish Pandey #include <arch_helpers.h>
12ccd81f1eSAndre Przywara #include <bl31/sync_handle.h>
13ccd81f1eSAndre Przywara #include <context.h>
143c789bfcSManish Pandey #include <lib/el3_runtime/context_mgmt.h>
15ccd81f1eSAndre Przywara 
16ccd81f1eSAndre Przywara int handle_sysreg_trap(uint64_t esr_el3, cpu_context_t *ctx)
17ccd81f1eSAndre Przywara {
180ed3be6fSVarun Wadekar 	uint64_t __unused opcode = esr_el3 & ISS_SYSREG_OPCODE_MASK;
190ed3be6fSVarun Wadekar 
201ae75529SAndre Przywara #if ENABLE_FEAT_RNG_TRAP
210ed3be6fSVarun Wadekar 	if ((opcode == ISS_SYSREG_OPCODE_RNDR) || (opcode == ISS_SYSREG_OPCODE_RNDRRS)) {
221ae75529SAndre Przywara 		return plat_handle_rng_trap(esr_el3, ctx);
23ccd81f1eSAndre Przywara 	}
240ed3be6fSVarun Wadekar #endif
250ed3be6fSVarun Wadekar 
260ed3be6fSVarun Wadekar #if IMPDEF_SYSREG_TRAP
270ed3be6fSVarun Wadekar 	if ((opcode & ISS_SYSREG_OPCODE_IMPDEF) == ISS_SYSREG_OPCODE_IMPDEF) {
280ed3be6fSVarun Wadekar 		return plat_handle_impdef_trap(esr_el3, ctx);
290ed3be6fSVarun Wadekar 	}
300ed3be6fSVarun Wadekar #endif
310ed3be6fSVarun Wadekar 
320ed3be6fSVarun Wadekar 	return TRAP_RET_UNHANDLED;
33ccd81f1eSAndre Przywara }
343c789bfcSManish Pandey 
353c789bfcSManish Pandey static bool is_tge_enabled(void)
363c789bfcSManish Pandey {
373c789bfcSManish Pandey 	u_register_t hcr_el2 = read_hcr_el2();
383c789bfcSManish Pandey 
393c789bfcSManish Pandey 	return ((read_feat_vhe_id_field() != 0U) && ((hcr_el2 & HCR_TGE_BIT) != 0U));
403c789bfcSManish Pandey }
413c789bfcSManish Pandey 
423c789bfcSManish Pandey /*
433c789bfcSManish Pandey  * This function is to ensure that undef injection does not happen into
443c789bfcSManish Pandey  * non-existent S-EL2. This could happen when trap happens from S-EL{1,0}
453c789bfcSManish Pandey  * and non-secure world is running with TGE bit set, considering EL3 does
463c789bfcSManish Pandey  * not save/restore EL2 registers if only one world has EL2 enabled.
473c789bfcSManish Pandey  * So reading hcr_el2.TGE would give NS world value.
483c789bfcSManish Pandey  */
493c789bfcSManish Pandey static bool is_secure_trap_without_sel2(u_register_t scr)
503c789bfcSManish Pandey {
513c789bfcSManish Pandey 	return ((scr & (SCR_NS_BIT | SCR_EEL2_BIT)) == 0);
523c789bfcSManish Pandey }
533c789bfcSManish Pandey 
543c789bfcSManish Pandey static unsigned int target_el(unsigned int from_el, u_register_t scr)
553c789bfcSManish Pandey {
563c789bfcSManish Pandey 	if (from_el > MODE_EL1) {
573c789bfcSManish Pandey 		return from_el;
583c789bfcSManish Pandey 	} else if (is_tge_enabled() && !is_secure_trap_without_sel2(scr)) {
593c789bfcSManish Pandey 		return MODE_EL2;
603c789bfcSManish Pandey 	} else {
613c789bfcSManish Pandey 		return MODE_EL1;
623c789bfcSManish Pandey 	}
633c789bfcSManish Pandey }
643c789bfcSManish Pandey 
653c789bfcSManish Pandey static u_register_t get_elr_el3(u_register_t spsr_el3, u_register_t vbar, unsigned int target_el)
663c789bfcSManish Pandey {
673c789bfcSManish Pandey 	unsigned int outgoing_el = GET_EL(spsr_el3);
683c789bfcSManish Pandey 	u_register_t elr_el3 = 0;
693c789bfcSManish Pandey 
703c789bfcSManish Pandey 	if (outgoing_el == target_el) {
713c789bfcSManish Pandey 		/*
723c789bfcSManish Pandey 		 * Target EL is either EL1 or EL2, lsb can tell us the SPsel
733c789bfcSManish Pandey 		 *  Thread mode  : 0
743c789bfcSManish Pandey 		 *  Handler mode : 1
753c789bfcSManish Pandey 		 */
763c789bfcSManish Pandey 		if ((spsr_el3 & (MODE_SP_MASK << MODE_SP_SHIFT)) == MODE_SP_ELX) {
773c789bfcSManish Pandey 			elr_el3 = vbar + CURRENT_EL_SPX;
783c789bfcSManish Pandey 		} else {
793c789bfcSManish Pandey 			elr_el3 = vbar + CURRENT_EL_SP0;
803c789bfcSManish Pandey 		}
813c789bfcSManish Pandey 	} else {
823c789bfcSManish Pandey 		/* Vector address for Lower EL using Aarch64 */
833c789bfcSManish Pandey 		elr_el3 = vbar + LOWER_EL_AARCH64;
843c789bfcSManish Pandey 	}
853c789bfcSManish Pandey 
863c789bfcSManish Pandey 	return elr_el3;
873c789bfcSManish Pandey }
883c789bfcSManish Pandey 
893c789bfcSManish Pandey /*
903c789bfcSManish Pandey  * Explicitly create all bits of SPSR to get PSTATE at exception return.
913c789bfcSManish Pandey  *
923c789bfcSManish Pandey  * The code is based on "Aarch64.exceptions.takeexception" described in
933c789bfcSManish Pandey  * DDI0602 revision 2023-06.
943c789bfcSManish Pandey  * "https://developer.arm.com/documentation/ddi0602/2023-06/Shared-Pseudocode/
953c789bfcSManish Pandey  * aarch64-exceptions-takeexception"
963c789bfcSManish Pandey  *
973c789bfcSManish Pandey  * NOTE: This piece of code must be reviewed every release to ensure that
983c789bfcSManish Pandey  * we keep up with new ARCH features which introduces a new SPSR bit.
993c789bfcSManish Pandey  */
u_register_t create_spsr(u_register_t old_spsr, unsigned int target_el)
{
	u_register_t new_spsr = 0;
	u_register_t sctlr;

	/*
	 * Pattern used throughout: each PSTATE field first inherits the
	 * pre-exception value from old_spsr, then is overridden when the
	 * relevant feature is detected, following the architectural
	 * AArch64.TakeException() pseudocode section by section.
	 */

	/* Set M bits for target EL in AArch64 mode, also get sctlr */
	if (target_el == MODE_EL2) {
		sctlr = read_sctlr_el2();
		new_spsr |= (SPSR_M_AARCH64 << SPSR_M_SHIFT) | SPSR_M_EL2H;
	} else {
		sctlr = read_sctlr_el1();
		new_spsr |= (SPSR_M_AARCH64 << SPSR_M_SHIFT) | SPSR_M_EL1H;
	}

	/* Mask all exceptions, update DAIF bits */
	new_spsr |= SPSR_DAIF_MASK << SPSR_DAIF_SHIFT;

	/* If FEAT_BTI is present, clear BTYPE bits */
	new_spsr |= old_spsr & (SPSR_BTYPE_MASK_AARCH64 << SPSR_BTYPE_SHIFT_AARCH64);
	if (is_armv8_5_bti_present()) {
		new_spsr &= ~(SPSR_BTYPE_MASK_AARCH64 << SPSR_BTYPE_SHIFT_AARCH64);
	}

	/* If SSBS is implemented, take the value from SCTLR.DSSBS */
	new_spsr |= old_spsr & SPSR_SSBS_BIT_AARCH64;
	if (is_feat_ssbs_present()) {
		if ((sctlr & SCTLR_DSSBS_BIT) != 0U) {
			new_spsr |= SPSR_SSBS_BIT_AARCH64;
		} else {
			new_spsr &= ~SPSR_SSBS_BIT_AARCH64;
		}
	}

	/* If FEAT_NMI is implemented, ALLINT = !(SCTLR.SPINTMASK) */
	new_spsr |= old_spsr & SPSR_ALLINT_BIT_AARCH64;
	if (is_feat_nmi_present()) {
		if ((sctlr & SCTLR_SPINTMASK_BIT) != 0U) {
			new_spsr &= ~SPSR_ALLINT_BIT_AARCH64;
		} else {
			new_spsr |= SPSR_ALLINT_BIT_AARCH64;
		}
	}

	/* Clear PSTATE.IL bit explicitly */
	new_spsr &= ~SPSR_IL_BIT;

	/* Clear PSTATE.SS bit explicitly */
	new_spsr &= ~SPSR_SS_BIT;

	/*
	 * Update PSTATE.PAN bit: set when targeting EL1, or EL2 with TGE
	 * routing in effect, unless SCTLR_ELx.SPAN asks to leave it alone.
	 */
	new_spsr |= old_spsr & SPSR_PAN_BIT;
	if (is_feat_pan_present() &&
	    ((target_el == MODE_EL1) || ((target_el == MODE_EL2) && is_tge_enabled())) &&
	    ((sctlr & SCTLR_SPAN_BIT) == 0U)) {
	    new_spsr |= SPSR_PAN_BIT;
	}

	/* Clear UAO bit if FEAT_UAO is present */
	new_spsr |= old_spsr & SPSR_UAO_BIT_AARCH64;
	if (is_feat_uao_present()) {
		new_spsr &= ~SPSR_UAO_BIT_AARCH64;
	}

	/* DIT bits are unchanged */
	new_spsr |= old_spsr & SPSR_DIT_BIT;

	/* If FEAT_MTE2 is implemented mask tag faults by setting TCO bit */
	new_spsr |= old_spsr & SPSR_TCO_BIT_AARCH64;
	if (is_feat_mte2_present()) {
		new_spsr |= SPSR_TCO_BIT_AARCH64;
	}

	/* NZCV bits are unchanged */
	new_spsr |= old_spsr & SPSR_NZCV;

	/* If FEAT_EBEP is present set PM bit */
	new_spsr |= old_spsr & SPSR_PM_BIT_AARCH64;
	if (is_feat_ebep_present()) {
		new_spsr |= SPSR_PM_BIT_AARCH64;
	}

	/* If FEAT_SEBEP is present clear PPEND bit */
	new_spsr |= old_spsr & SPSR_PPEND_BIT;
	if (is_feat_sebep_present()) {
		new_spsr &= ~SPSR_PPEND_BIT;
	}

	/* If FEAT_GCS is present, update EXLOCK bit from target EL's GCSCR */
	new_spsr |= old_spsr & SPSR_EXLOCK_BIT_AARCH64;
	if (is_feat_gcs_present()) {
		u_register_t gcscr;
		if (target_el == MODE_EL2) {
			gcscr = read_gcscr_el2();
		} else {
			gcscr = read_gcscr_el1();
		}
		new_spsr |= (gcscr & GCSCR_EXLOCK_EN_BIT) ? SPSR_EXLOCK_BIT_AARCH64 : 0;
	}

	return new_spsr;
}
2013c789bfcSManish Pandey 
/*
 * Handler for injecting Undefined exception to lower EL which is caused by
 * lower EL accessing system registers of which (old)EL3 firmware is unaware.
 *
 * This is a safety net to avoid EL3 panics caused by system register access
 * that triggers an exception syndrome EC=0x18.
 */
void inject_undef64(cpu_context_t *ctx)
{
	/* Syndrome for the injected exception: EC = Unknown (0), IL set */
	u_register_t esr = (EC_UNKNOWN << ESR_EC_SHIFT) | ESR_IL_BIT;
	el3_state_t *state = get_el3state_ctx(ctx);
	/* Address of the instruction that trapped to EL3 */
	u_register_t elr_el3 = read_ctx_reg(state, CTX_ELR_EL3);
	/* PSTATE of the lower EL at the time of the trap */
	u_register_t old_spsr = read_ctx_reg(state, CTX_SPSR_EL3);
	u_register_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
	u_register_t new_spsr = 0;
	unsigned int to_el = target_el(GET_EL(old_spsr), scr_el3);

	/*
	 * Make the target EL look as if it took the exception itself:
	 * ELR_ELx = trapped instruction address, ESR_ELx = Unknown syndrome,
	 * SPSR_ELx = PSTATE at the trap. ELR_EL3 is then redirected to the
	 * target EL's vector entry so ERET lands in its handler.
	 */
	if (to_el == MODE_EL2) {
		write_elr_el2(elr_el3);
		elr_el3 = get_elr_el3(old_spsr, read_vbar_el2(), to_el);
		write_esr_el2(esr);
		write_spsr_el2(old_spsr);
	} else {
		write_elr_el1(elr_el3);
		elr_el3 = get_elr_el3(old_spsr, read_vbar_el1(), to_el);
		write_esr_el1(esr);
		write_spsr_el1(old_spsr);
	}

	/* PSTATE the lower-EL handler starts with on ERET from EL3 */
	new_spsr = create_spsr(old_spsr, to_el);

	write_ctx_reg(state, CTX_SPSR_EL3, new_spsr);
	write_ctx_reg(state, CTX_ELR_EL3, elr_el3);
}
236