xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision 0d020822ae88b8623fa6c9c55973f0045194dcef)
/*
 * Copyright (c) 2017-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <dsu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

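/*
 * ESR_EL3 syndrome values for an SMC #0 trapped from AArch64 (EC = 0x17)
 * and from AArch32 (EC = 0x13), with the IL bit set and a zero ISS.
 */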
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during EL3
	 * execution. This is not required for the fast path above because it
	 * does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

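	/*
	 * Workaround vector table: each lower-EL entry applies the enabled
	 * CVE-2022-23960 (BHB) and/or dynamic CVE-2018-3639 mitigations and
	 * then branches to the corresponding generic runtime vector.
	 */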
	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will go through the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * The caller must pass the expected esr_el3 value to compare against
	 * in x2. x2-x3 are saved to and restored from the context outside of
	 * this function, before jumping to the main runtime vector table
	 * entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * x2 is populated outside this function with the expected ESR_EL3
	 * value for an SMC #0 from the lower EL.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

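	/*
	 * Set or clear CPUACTLR2_EL1.DISABLE_LOAD_PASS_STORE to match the
	 * request: clear the bit when the caller asked for the mitigation
	 * to be disabled (x1 == 0), set it otherwise.
	 */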
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

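/*
 * Erratum workarounds. Each workaround_reset_* block is applied from the
 * CPU reset function when its build flag is set; the check_erratum_*
 * entries report applicability based on the CPU revision/variant.
 */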
workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

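/* --------------------------------------------------------------
 * Erratum workaround check for Cortex-A76 erratum #1286807.
 * This applies only to revisions <= r3p0 of Cortex-A76. The
 * workaround itself is not applied in this file; when built in it
 * is applied unconditionally elsewhere, so report it as applicable
 * in that case.
 * --------------------------------------------------------------
 */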
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
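	/*
	 * Program slots 3 to 5 of the implementation defined S3_6_C15_C8_n
	 * registers with the values given in the erratum notice.
	 */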
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

/* --------------------------------------------------------------
 * Erratum workaround for Cortex-A76 erratum #1165522.
 * This applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum it is applied unconditionally
 * when built in; report it as applicable in that case.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* This erratum has no workaround in the CPU; generic code must take care of it. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960, NO_APPLY_AT_RESET

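/*
 * DSU (DynamIQ Shared Unit) errata 798953 and 936184; the workarounds and
 * checks are implemented by the shared macros from dsu_macros.S.
 */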
workaround_reset_start cortex_a76, ERRATUM(798953), ERRATA_DSU_798953
	errata_dsu_798953_wa_impl
workaround_reset_end cortex_a76, ERRATUM(798953)

check_erratum_custom_start cortex_a76, ERRATUM(798953)
	check_errata_dsu_798953_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(798953)

workaround_reset_start cortex_a76, ERRATUM(936184), ERRATA_DSU_936184
	errata_dsu_936184_wa_impl
workaround_reset_end cortex_a76, ERRATUM(936184)

check_erratum_custom_start cortex_a76, ERRATUM(936184)
	check_errata_dsu_936184_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(936184)

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
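	/*
	 * Without the dynamic workaround, mitigation relies on the PE
	 * implementing SSBS, so assert that it does.
	 */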
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overridden to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is installed here, skip overriding it again
	 * for CVE-2022-23960 below, as both use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102, NO_GET_CPU_REV

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

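/*
 * Register the CPU handlers: the reset function, the core power-down
 * function and the CVE-2018-3639 mitigation disable hook used by
 * SMCCC_ARCH_WORKAROUND_2.
 */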
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn
