/*
 * Copyright (c) 2019-2025, Arm Limited. All rights reserved.
 * Copyright (c) 2021-2023, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * CPU support library for the Arm Cortex-A78AE:
 *  - reset-time erratum workarounds and CVE mitigations,
 *  - the core power-down hook,
 *  - the crash-report register dump.
 * Built on the TF-A cpu_macros.S helpers (workaround_reset_start/end,
 * check_erratum_*, cpu_reset_func_*, declare_cpu_ops).
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a78_ae.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include "wa_cve_2022_23960_bhb_vector.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "cortex_a78_ae must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

#if WORKAROUND_CVE_2022_23960
	/*
	 * Instantiate the branch-history-clearing exception vectors used by
	 * the CVE-2022-23960 (Spectre-BHB) mitigation installed further down.
	 */
	wa_cve_2022_23960_bhb_vector_table CORTEX_A78_AE_BHB_LOOP_COUNT, cortex_a78_ae
#endif /* WORKAROUND_CVE_2022_23960 */

cpu_reset_prologue cortex_a78_ae

/* Erratum 1941500: workaround is to set CPUECTLR_EL1 bit [8]. */
workaround_reset_start cortex_a78_ae, ERRATUM(1941500), ERRATA_A78_AE_1941500
	sysreg_bit_set CORTEX_A78_AE_CPUECTLR_EL1, CORTEX_A78_AE_CPUECTLR_EL1_BIT_8
workaround_reset_end cortex_a78_ae, ERRATUM(1941500)

/* Erratum 1941500 applies to revisions r0p0 - r0p1. */
check_erratum_ls cortex_a78_ae, ERRATUM(1941500), CPU_REV(0, 1)

/*
 * Erratum 1951502: program three entries of the IMPLEMENTATION DEFINED
 * S3_6_c15_c8_* register group (c8_0 selects the entry; c8_1/c8_2/c8_3
 * carry the data). The constants are presumably taken verbatim from the
 * Arm Software Developer Errata Notice for this erratum -- do not alter
 * them (TODO(review): confirm against the Cortex-A78AE SDEN).
 * Clobbers x0.
 */
workaround_reset_start cortex_a78_ae, ERRATUM(1951502), ERRATA_A78_AE_1951502
	/* Entry 0 (index written via xzr, i.e. 0) */
	msr	S3_6_c15_c8_0, xzr
	ldr	x0, =0x10E3900002
	msr	S3_6_c15_c8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_c15_c8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_c15_c8_1, x0

	/* Entry 1 */
	mov	x0, #1
	msr	S3_6_c15_c8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_c15_c8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_c15_c8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_c15_c8_1, x0

	/* Entry 2 */
	mov	x0, #2
	msr	S3_6_c15_c8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_c15_c8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_c15_c8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_c15_c8_1, x0
workaround_reset_end cortex_a78_ae, ERRATUM(1951502)

/* Erratum 1951502 applies to revisions r0p0 - r0p1. */
check_erratum_ls cortex_a78_ae, ERRATUM(1951502), CPU_REV(0, 1)

workaround_reset_start cortex_a78_ae, ERRATUM(2376748), ERRATA_A78_AE_2376748
	/* -------------------------------------------------------
	 * Set CPUACTLR2_EL1[0] to 1 to force PLDW/PFRM ST to
	 * behave like PLD/PRFM LD and not cause invalidations to
	 * other PE caches. There might be a small performance
	 * degradation to this workaround for certain workloads
	 * that share data.
	 * -------------------------------------------------------
	 */
	sysreg_bit_set CORTEX_A78_AE_ACTLR2_EL1, CORTEX_A78_AE_ACTLR2_EL1_BIT_0
workaround_reset_end cortex_a78_ae, ERRATUM(2376748)

/* Erratum 2376748 applies to revisions r0p0 - r0p2. */
check_erratum_ls cortex_a78_ae, ERRATUM(2376748), CPU_REV(0, 2)

workaround_reset_start cortex_a78_ae, ERRATUM(2395408), ERRATA_A78_AE_2395408
	/* --------------------------------------------------------
	 * Disable folding of demand requests into older prefetches
	 * with L2 miss requests outstanding by setting the
	 * CPUACTLR2_EL1[40] to 1.
	 * --------------------------------------------------------
	 */
	sysreg_bit_set CORTEX_A78_AE_ACTLR2_EL1, CORTEX_A78_AE_ACTLR2_EL1_BIT_40
workaround_reset_end cortex_a78_ae, ERRATUM(2395408)

/* Erratum 2395408 applies to revisions r0p0 - r0p1. */
check_erratum_ls cortex_a78_ae, ERRATUM(2395408), CPU_REV(0, 1)

workaround_reset_start cortex_a78_ae, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
#if IMAGE_BL31
	/*
	 * The Cortex-A78AE generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs.
	 */
	override_vector_table wa_cve_vbar_cortex_a78_ae
#endif /* IMAGE_BL31 */
workaround_reset_end cortex_a78_ae, CVE(2022, 23960)

/* Mitigation is applied whenever the build opts in, independent of revision. */
check_erratum_chosen cortex_a78_ae, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* Disable hardware page aggregation. Enables mitigation for `CVE-2024-5660`. */
workaround_reset_start cortex_a78_ae, CVE(2024, 5660), WORKAROUND_CVE_2024_5660
	sysreg_bit_set CORTEX_A78_AE_CPUECTLR_EL1, BIT(46)
workaround_reset_end cortex_a78_ae, CVE(2024, 5660)

/* CVE-2024-5660 mitigation applies to revisions r0p0 - r0p3. */
check_erratum_ls cortex_a78_ae, CVE(2024, 5660), CPU_REV(0, 3)

/*
 * Reset handler: after the workarounds above are applied by the wrapper
 * macros, optionally open up AMU access and enable its counters.
 * Clobbers x0 (when ENABLE_FEAT_AMU is set).
 */
cpu_reset_func_start cortex_a78_ae
#if ENABLE_FEAT_AMU
	/* Make sure accesses from EL0/EL1 and EL2 are not trapped to EL3 */
	sysreg_bit_clear actlr_el3, CORTEX_A78_ACTLR_TAM_BIT

	/* Make sure accesses from non-secure EL0/EL1 are not trapped to EL2 */
	sysreg_bit_clear actlr_el2, CORTEX_A78_ACTLR_TAM_BIT

	/* Enable group0 counters */
	mov	x0, #CORTEX_A78_AMU_GROUP0_MASK
	msr	CPUAMCNTENSET0_EL0, x0

	/* Enable group1 counters */
	mov	x0, #CORTEX_A78_AMU_GROUP1_MASK
	msr	CPUAMCNTENSET1_EL0, x0
#endif
cpu_reset_func_end cortex_a78_ae

	/* -------------------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * -------------------------------------------------------
	 */
func cortex_a78_ae_core_pwr_dwn
	/* -------------------------------------------------------
	 * Enable CPU power down bit in power control register
	 * -------------------------------------------------------
	 */
	sysreg_bit_set CORTEX_A78_CPUPWRCTLR_EL1, CORTEX_A78_CPUPWRCTLR_EL1_CORE_PWRDN_EN_BIT
	isb
	ret
endfunc cortex_a78_ae_core_pwr_dwn

	/* -------------------------------------------------------
	 * This function provides cortex_a78_ae specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * -------------------------------------------------------
	 */
.section .rodata.cortex_a78_ae_regs, "aS"
cortex_a78_ae_regs:	/* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a78_ae_cpu_reg_dump
	adr	x6, cortex_a78_ae_regs		/* x6 = name list for the crash reporter */
	mrs	x8, CORTEX_A78_CPUECTLR_EL1	/* x8 = value paired with the first name */
	ret
endfunc cortex_a78_ae_cpu_reg_dump

/*
 * Register this CPU's ops (reset and power-down handlers) against its MIDR.
 * cortex_a78_ae_reset_func is emitted by cpu_reset_func_start/end above.
 */
declare_cpu_ops cortex_a78_ae, CORTEX_A78_AE_MIDR, \
	cortex_a78_ae_reset_func, \
	cortex_a78_ae_core_pwr_dwn