xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision b62673c645752a78f649282cfa293e8da09e3bef)
/*
 * Copyright (c) 2017-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <dsu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
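/*
 * These are the expected ESR_EL3 values for an SMC #0 taken to EL3: EC
 * (bits [31:26]) is 0x17 for an SMC executed in AArch64 state and 0x13 for
 * AArch32, the IL bit (bit [25]) is set, and the ISS (the SMC immediate)
 * is zero.
 */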

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * the x0-x3 registers do not need to be restored, as the
	 * calling context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the CVE-2018-3639 (Spectre variant 4) mitigation
	 * during EL3 execution. This is not required for the fast path
	 * above because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
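	/*
	 * Illustrative use from a lower-EL vector entry (mirroring the
	 * vector entries below):
	 *   apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	 * Non-sync entries pass _is_sync_exception=0 and only enable the
	 * mitigation for the duration of EL3 execution.
	 */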
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar
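	/*
	 * This vector table is installed from cortex_a76_reset_func (via
	 * override_vector_table) when the relevant mitigations are enabled
	 * in BL31. Each entry applies the required mitigation and then
	 * branches to the corresponding default runtime exception handler.
	 */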

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 take the fast path and return early.
	 *
	 * In the fast path the x0-x3 registers do not need to be restored,
	 * as the calling context will have saved them.
	 *
	 * The caller must pass the expected esr_el3 value to compare
	 * against in x2. The registers used here are saved to and restored
	 * from the context by the calling macro, outside of this function,
	 * before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC came from AArch64/AArch32 state with immediate #0
	 * and with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the expected ESR_EL3
	 * value for such an SMC.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
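	/*
	 * If the W0 comparison above was equal, compare the expected ESR_EL3
	 * value in w2 against the live esr_el3 in w3; otherwise force the
	 * flags to #0 (NZCV = 0b0000), i.e. NE, so the branch below is taken.
	 */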
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

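	/*
	 * Compute both the "mitigation enabled" (bit set) and "mitigation
	 * disabled" (bit cleared) CPUACTLR2_EL1 values and select the one
	 * requested by the caller (EQ means x1 was zero, i.e. disable).
	 */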
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)
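/*
 * Note: workaround_reset_start/workaround_reset_end register a workaround
 * that is applied from the reset function when the corresponding build flag
 * is set, and check_erratum_ls reports the erratum as applicable for
 * revisions at or below the given CPU_REV (here, r1p0).
 */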

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
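	/*
	 * The writes below program implementation-defined registers
	 * (S3_6_C15_C8_n); the values are taken from the workaround
	 * described in the erratum notice and are not architecturally
	 * documented.
	 */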
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)
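/*
 * Erratum 2743102 is a runtime workaround: it is not applied at reset, but
 * is invoked from cortex_a76_core_pwr_dwn below via apply_erratum.
 */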

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639
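/*
 * This is the CVE-2018-3639 "disable mitigation" hook: it is registered via
 * declare_cpu_ops_wa at the end of this file, and it is also the function
 * whose address the fast path above programs into the CPU context so that
 * el3_exit can invoke it.
 */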

/* --------------------------------------------------------------
 * Errata workaround for Cortex-A76 erratum 1165522.
 * This erratum applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum, the workaround is applied
 * unconditionally when it is built in; in that case, report it
 * as applicable.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* This erratum has no workaround in the CPU; generic code must handle it. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960, NO_APPLY_AT_RESET

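/*
 * The two errata below are DSU (DynamIQ Shared Unit) errata rather than core
 * errata; their workaround and check sequences come from dsu_macros.S.
 */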
workaround_reset_start cortex_a76, ERRATUM(798953), ERRATA_DSU_798953
	errata_dsu_798953_wa_impl
workaround_reset_end cortex_a76, ERRATUM(798953)

check_erratum_custom_start cortex_a76, ERRATUM(798953)
	check_errata_dsu_798953_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(798953)

workaround_reset_start cortex_a76, ERRATUM(936184), ERRATA_DSU_936184
	errata_dsu_936184_wa_impl
workaround_reset_end cortex_a76, ERRATUM(936184)

check_erratum_custom_start cortex_a76, ERRATUM(936184)
	check_errata_dsu_936184_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(936184)

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
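	/*
	 * x0 now holds the ID_AA64PFR1_EL1.SSBS field: a non-zero value means
	 * the PE implements Speculative Store Bypass Safe, so the dynamic
	 * workaround below can be skipped.
	 */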
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overridden to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is installed here, skip overriding it again
	 * below for CVE-2022-23960, as both mitigations use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102, NO_GET_CPU_REV

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""
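	/* The trailing empty string terminates the register-name list. */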

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

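/*
 * Register the CPU operations: MIDR, reset handler, the per-CVE extra
 * function slots (only the CVE-2018-3639 disable hook is provided here),
 * and the core power-down handler.
 */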
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn
540