/*
 * Copyright (c) 2017-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <dsu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
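
/*
 * Both values above encode ESR_EL3 for an SMC #0 trapped from a lower
 * EL: EC = 0x17 (SMC from AArch64) or 0x13 (SMC from AArch32), the IL
 * bit set, and a zero ISS (the SMC immediate).
 */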

cpu_reset_prologue cortex_a76

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path: `SMCCC_ARCH_WORKAROUND_2` SMC
	 * calls from a lower EL running in AArch32 or AArch64 are
	 * handled early and return directly to the caller.
	 *
	 * The macro saves x2-x3 to the context. In the fast path the
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves x29-x30
	 * to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during
	 * EL3 execution. This is not required for the fast path above
	 * because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path:
	 * `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL running in
	 * AArch64 are handled early and return directly to the caller.
	 *
	 * In the fast path the x0-x3 registers do not need to be restored
	 * as the calling context will have saved them.
	 *
	 * The caller must pass the expected esr_el3 value in x2. x2-x3
	 * are saved to and restored from the context outside of this
	 * function, before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
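	/*
	 * Per the SMC Calling Convention, `SMCCC_ARCH_WORKAROUND_2`
	 * (function ID 0x8000_7FFF) takes one argument in w1: non-zero
	 * requests the mitigation be enabled for the caller, zero
	 * requests it be disabled. Hence the checks of x0 against the
	 * function ID and of x1 against zero below.
	 */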
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation. If the first
	 * comparison fails, `ccmp` sets the flags to its immediate #0
	 * (NZCV all clear), i.e. NE. X2 is populated outside this
	 * function with the expected ESR_EL3 value for the SMC.
	 *
	 * The logical-immediate `orr` loads the function ID constant
	 * in a single instruction.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants the mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

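	/*
	 * Compute both the mitigated (bit set) and unmitigated (bit
	 * clear) values of CPUACTLR2_EL1 and select between them with
	 * the same condition: EQ means x1 was zero, i.e. a disable
	 * request.
	 */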
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

/* Erratum entry and check function for SMCCC_ARCH_WORKAROUND_2 */
add_erratum_entry cortex_a76, ERRATUM(ARCH_WORKAROUND_2), WORKAROUND_CVE_2018_3639

check_erratum_chosen cortex_a76, ERRATUM(ARCH_WORKAROUND_2), WORKAROUND_CVE_2018_3639
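
/*
 * The add_erratum_entry and check_erratum_* macros come from
 * cpu_macros.S: add_erratum_entry registers the erratum in this CPU's
 * errata list for status reporting, while the check_erratum_* variants
 * emit the function that reports whether the erratum applies to the
 * running revision and/or was compiled in.
 */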

workaround_reset_start cortex_a76, ERRATUM(798953), ERRATA_DSU_798953
	errata_dsu_798953_wa_impl
workaround_reset_end cortex_a76, ERRATUM(798953)

check_erratum_custom_start cortex_a76, ERRATUM(798953)
	check_errata_dsu_798953_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(798953)

workaround_reset_start cortex_a76, ERRATUM(936184), ERRATA_DSU_936184
	errata_dsu_936184_wa_impl
workaround_reset_end cortex_a76, ERRATUM(936184)

check_erratum_custom_start cortex_a76, ERRATUM(936184)
	check_errata_dsu_936184_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(936184)

workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)
check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)
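
/*
 * check_erratum_ls reports an erratum as applying to all revisions up
 * to and including the given one (r1p0 here); check_erratum_range,
 * used further below, bounds it between two revisions inclusive.
 */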

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

/* --------------------------------------------------------------
 * Erratum workaround for Cortex-A76 erratum #1165522.
 * This applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum, the workaround is applied
 * unconditionally when built in; report it as applicable in
 * that case.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

add_erratum_entry cortex_a76, ERRATUM(1165522), ERRATA_A76_1165522

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

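/*
 * As with erratum 1165522, erratum 1286807 is reported as applicable
 * whenever its workaround is built in, regardless of revision.
 */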
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

add_erratum_entry cortex_a76, ERRATUM(1286807), ERRATA_A76_1286807

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
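	/*
	 * The sequence below appears to program the implementation-defined
	 * instruction-patching registers (CPUPSELR_EL3 = S3_6_C15_C8_0,
	 * CPUPCR_EL3 = S3_6_C15_C8_1, CPUPOR_EL3 = S3_6_C15_C8_2,
	 * CPUPMR_EL3 = S3_6_C15_C8_3): for each of slots 3-5 it selects
	 * the slot, then writes an opcode, a mask and a control value.
	 */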
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* Issue a DSB before the ISB in the power-down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* This CVE has no workaround in the CPU; generic code must take care of it */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
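	/*
	 * Without the dynamic workaround, hardware SSBS is the only
	 * mitigation available, so assert that it is implemented.
	 */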
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overridden to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is used, skip overriding it again for
	 * CVE-2022-23960, as both workarounds use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

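	/* Apply the erratum 2743102 runtime workaround (a DSB), if built in */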
	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102, NO_GET_CPU_REV

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

	/* ---------------------------------------------
	 * This function provides Cortex-A76-specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ASCII and
	 * x8 - x15 having the values of the registers
	 * to be reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ASCII list of register names to be reported */
	.asciz	"cpuectlr_el1", ""
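	/* The trailing empty string terminates the name list */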

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

declare_cpu_ops cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	cortex_a76_core_pwr_dwn