/*
 * Copyright (c) 2017-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <dsu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

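/*
 * ESR_EL3 values for an SMC #0 trapped from a lower EL: EC = 0x17 (SMC from
 * AArch64) or EC = 0x13 (SMC from AArch32), with IL = 1 and ISS = 0. They are
 * compared against ESR_EL3 in the CVE-2018-3639 fast path below to confirm
 * that the exception really is an SMC #0.
 */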
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

cpu_reset_prologue cortex_a76

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during
	 * EL3 execution. This is not required for the fast path above
	 * because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

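/*
 * Dedicated vector table used when either the dynamic CVE-2018-3639
 * workaround or the CVE-2022-23960 (BHB) workaround has to hook exception
 * entry from lower ELs. It is installed by the reset function below via
 * override_vector_table.
 */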
#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will take the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * Caller must pass the expected esr_el3 value to compare via x2.
	 * Save and restore these registers outside of this function from the
	 * context before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC is coming from A64/A32 state with immediate #0
	 * and with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing the use of a single conditional branch.
	 * X2 is populated outside this function with the expected ESR_EL3
	 * value for such an SMC.
	 */
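	/*
	 * Illustrative equivalent (not assembled) of the check implemented
	 * by the orr/cmp/mrs/ccmp/bne sequence below:
	 *
	 *    if ((w0 == SMCCC_ARCH_WORKAROUND_2) && (esr_el3 == w2))
	 *            take the fast path;
	 *    else
	 *            branch to 1f (normal vector path);
	 */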
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

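	/*
	 * Apply the requested state to CPUACTLR2_EL1: the eq condition
	 * (x1 was zero, i.e. the caller asked for the mitigation to be
	 * disabled) selects the value with DISABLE_LOAD_PASS_STORE
	 * cleared, otherwise the bit stays set.
	 */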
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

/* Erratum entry and check function for SMCCC_ARCH_WORKAROUND_2 */
add_erratum_entry cortex_a76, ERRATUM(ARCH_WORKAROUND_2), WORKAROUND_CVE_2018_3639

check_erratum_chosen cortex_a76, ERRATUM(ARCH_WORKAROUND_2), WORKAROUND_CVE_2018_3639

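/*
 * DSU (DynamIQ Shared Unit) errata 798953 and 936184: the workaround and
 * check implementations are the shared helpers from dsu_macros.S.
 */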
workaround_reset_start cortex_a76, ERRATUM(798953), ERRATA_DSU_798953
	errata_dsu_798953_wa_impl
workaround_reset_end cortex_a76, ERRATUM(798953)

check_erratum_custom_start cortex_a76, ERRATUM(798953)
	check_errata_dsu_798953_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(798953)

workaround_reset_start cortex_a76, ERRATUM(936184), ERRATA_DSU_936184
	errata_dsu_936184_wa_impl
workaround_reset_end cortex_a76, ERRATUM(936184)

check_erratum_custom_start cortex_a76, ERRATUM(936184)
	check_errata_dsu_936184_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(936184)

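/*
 * Cortex-A76 core errata. Each check_erratum_ls below declares the erratum
 * as applying to revisions at or below the stated one; check_erratum_range
 * bounds the applicable revisions on both sides.
 */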
workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

/* --------------------------------------------------------------
 * Erratum workaround for Cortex-A76 erratum #1165522.
 * This applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum, the workaround is applied
 * unconditionally when it is built in, so report it as applicable
 * in that case.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

add_erratum_entry cortex_a76, ERRATUM(1165522), ERRATA_A76_1165522

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

add_erratum_entry cortex_a76, ERRATUM(1286807), ERRATA_A76_1286807

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
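	/*
	 * Write three groups of values to the IMPLEMENTATION DEFINED
	 * S3_6_C15_C8_n registers, as given by the erratum 1946160
	 * workaround sequence.
	 */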
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

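/*
 * Disable function for the CVE-2018-3639 mitigation. It is programmed into
 * the CPU context by the fast path above and invoked from `el3_exit` when
 * the calling world has requested the mitigation to be disabled.
 */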
func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* The erratum has no workaround in the cpu. Generic code must take care. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

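/*
 * Reset function. The errata framework applies the reset workarounds
 * declared above; the code below additionally decides whether the dynamic
 * CVE-2018-3639 workaround is needed (it is not when the PE implements
 * SSBS) and, in BL31, installs the dedicated vector table for the CVE
 * mitigations.
 */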
cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is installed here, skip overriding it again
	 * for CVE_2022_23960 as both use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

	/* ---------------------------------------------
	 * This function provides Cortex-A76-specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ASCII and
	 * x8 - x15 having the values of the registers
	 * to be reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""
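	/* The trailing empty string terminates the register name list. */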

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

declare_cpu_ops cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	cortex_a76_core_pwr_dwn