xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision 6fb2dbd2524f40134a8e003ad4c1a11a6ee8a001)
/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
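/*
 * These are the ESR_EL3 values expected for an SMC #0 trapped from a lower
 * EL: EC (bits[31:26]) is 0x17 for an SMC from AArch64 and 0x13 for an SMC
 * from AArch32, the IL bit (bit[25]) is set, and the ISS (the SMC immediate)
 * is zero.
 */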

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will go through the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * The caller must pass the value of esr_el3 to compare against via x2.
	 * Save and restore these registers outside of this function from the
	 * context before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC is an SMC #0 coming from A64/A32 state
	 * with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the expected ESR_EL3 value.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
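	/*
	 * ccmp with condition EQ: if W0 matched SMCCC_ARCH_WORKAROUND_2,
	 * compare the live ESR_EL3 value (w3) against the expected value (w2);
	 * otherwise force NZCV to #0, which clears the Z flag so the branch
	 * below is taken.
	 */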
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants the mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

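	/*
	 * Compute both the "mitigation on" (bit set) and "mitigation off"
	 * (bit clear) values of CPUACTLR2_EL1 and select one based on the
	 * flags from the enable/disable check above (EQ means x1 was zero,
	 * i.e. the caller asked for the mitigation to be disabled).
	 */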
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

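/*
 * The blocks below use the errata framework helpers from cpu_macros.S:
 * workaround_reset_start/workaround_reset_end register a workaround that is
 * applied at reset when the corresponding build flag is set, and
 * check_erratum_ls reports the erratum as applicable to revisions less than
 * or equal to the given CPU_REV.
 */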
workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR2_EL1_BIT_59
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

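/*
 * Erratum 1286807 has no reset-time workaround in this file. When the build
 * flag is enabled it is reported as unconditionally applying (the mitigation
 * is expected to be handled elsewhere); otherwise applicability is checked
 * against revisions <= r3p0.
 */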
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
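	/*
	 * The writes below program IMPLEMENTATION DEFINED registers
	 * (S3_6_C15_C8_n) with the values given for this erratum's
	 * workaround; each group first writes an index to S3_6_C15_C8_0
	 * and then programs the associated data registers.
	 */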
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

/* --------------------------------------------------------------
 * Errata workaround check for Cortex-A76 erratum 1165522.
 * The erratum applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum, the workaround is applied
 * unconditionally when built in, so report it as applicable in
 * that case.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* This CVE has no workaround in the CPU itself; generic code must take care of it. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960, NO_APPLY_AT_RESET

/* ERRATA_DSU_798953:
 * The erratum is defined in dsu_helpers.S but applies to cortex_a76 as well.
 * Hence we create symbolic names aliasing the existing workaround functions
 * so that they get registered under the errata framework.
 */
.equ check_erratum_cortex_a76_798953, check_errata_dsu_798953
.equ erratum_cortex_a76_798953_wa, errata_dsu_798953_wa
add_erratum_entry cortex_a76, ERRATUM(798953), ERRATA_DSU_798953, APPLY_AT_RESET

/* ERRATA_DSU_936184:
 * The erratum is defined in dsu_helpers.S but applies to cortex_a76 as well.
 * Hence we create symbolic names aliasing the existing workaround functions
 * so that they get registered under the errata framework.
 */
.equ check_erratum_cortex_a76_936184, check_errata_dsu_936184
.equ erratum_cortex_a76_936184_wa, errata_dsu_936184_wa
add_erratum_entry cortex_a76, ERRATUM(936184), ERRATA_DSU_936184, APPLY_AT_RESET

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overridden to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is installed here, skip installing it again
	 * for CVE-2022-23960, as both mitigations share the same vbar.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has already overridden the vectors.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

errata_report_shim cortex_a76

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

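/*
 * Register the CPU ops. The _wa variant also takes the per-CVE hooks: the
 * unused extra slots are left as CPU_NO_EXTRAn_FUNC, and the CVE-2018-3639
 * dynamic-disable hook is wired to cortex_a76_disable_wa_cve_2018_3639.
 */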
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn
