xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision b47dddd061e92054c3b2096fc8aa9688bfef68d6)
/*
 * Copyright (c) 2017-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <dsu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

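/*
 * Expected ESR_EL3 values for an SMC #0: EC is 0x17 (SMC from AArch64) or
 * 0x13 (SMC from AArch32), the IL bit is set and the ISS (the SMC immediate)
 * is zero.
 */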
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

cpu_reset_prologue cortex_a76

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the Spectre variant 4 mitigation during EL3
	 * execution. This is not required for the fast path above because
	 * it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 take the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * Caller must pass the expected value of esr_el3 to compare against
	 * via x2. Save and restore these registers outside of this function
	 * from the context before jumping to the main runtime vector table
	 * entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the expected ESR_EL3
	 * value for the SMC.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
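	/*
	 * ccmp: if the previous compare set EQ (w0 matched the workaround
	 * FID), compare the expected ESR_EL3 value in w2 with the live one
	 * in w3; otherwise force NZCV to 0 so the branch below is taken.
	 */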
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

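	/*
	 * Set or clear the load-pass-store chicken bit to match the request:
	 * EQ here means the caller passed w1 == 0 (disable), so the cleared
	 * value is written back; otherwise the bit is set.
	 */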
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

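/*
 * Erratum 1286807 has no reset workaround in this file. When the build flag
 * is set the erratum is reported as always applying, presumably because the
 * mitigation is handled unconditionally in generic code; otherwise a normal
 * revision check (<= r3p0) is performed.
 */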
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
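	/*
	 * The sequence below appears to program the implementation-defined
	 * instruction patching registers (S3_6_C15_C8_0..3, i.e. what other
	 * TF-A CPU files name CPUPSELR_EL3, CPUPCR_EL3, CPUPOR_EL3 and
	 * CPUPMR_EL3): each block selects a patch slot, then writes the
	 * opcode pattern, the opcode mask and the patch control value.
	 */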
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

/* --------------------------------------------------------------
 * Errata workaround for Cortex-A76 erratum 1165522.
 * This applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum it is applied unconditionally
 * when built in, so report it as applicable in that case.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* This erratum has no workaround in the CPU; generic code must take care of it. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

workaround_reset_start cortex_a76, ERRATUM(798953), ERRATA_DSU_798953
	errata_dsu_798953_wa_impl
workaround_reset_end cortex_a76, ERRATUM(798953)

check_erratum_custom_start cortex_a76, ERRATUM(798953)
	check_errata_dsu_798953_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(798953)

workaround_reset_start cortex_a76, ERRATUM(936184), ERRATA_DSU_936184
	errata_dsu_936184_wa_impl
workaround_reset_end cortex_a76, ERRATUM(936184)

check_erratum_custom_start cortex_a76, ERRATUM(936184)
	check_errata_dsu_936184_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(936184)

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
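	/*
	 * Without the dynamic workaround, SSBS is the only mitigation this
	 * build relies on, so assert that the PE actually implements it.
	 */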
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is installed here, skip overriding it again
	 * for CVE-2022-23960, as both use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK
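	/* Once this bit is set, the core powers down on its next WFI. */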

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102, NO_GET_CPU_REV

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

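/*
 * Note: with declare_cpu_ops_wa the extra1/extra2/extra3 slots are assumed to
 * hold, respectively, the CVE-2017-5715, CVE-2018-3639 and CVE-2022-23960
 * workaround hooks; only the CVE-2018-3639 disable function is provided here.
 */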
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn