xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision c4351f7f62449e8c8e58e71c398f7fc5c96bbfe8)
1/*
2 * Copyright (c) 2017-2026, Arm Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7#include <arch.h>
8#include <asm_macros.S>
9#include <common/bl_common.h>
10#include <cortex_a76.h>
11#include <cpu_macros.S>
12#include <dsu_macros.S>
13#include <plat_macros.S>
14#include <services/arm_arch_svc.h>
15#include "wa_cve_2022_23960_bhb.S"
16
17/* Hardware handled coherency */
18#if HW_ASSISTED_COHERENCY == 0
19#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
20#endif
21	.globl cortex_a76_reset_func
22	.globl cortex_a76_core_pwr_dwn
23	.globl cortex_a76_disable_wa_cve_2018_3639
24
25/* 64-bit only core */
26#if CTX_INCLUDE_AARCH32_REGS == 1
27#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
28#endif
29
30#define ESR_EL3_A64_SMC0	0x5e000000
31#define ESR_EL3_A32_SMC0	0x4e000000
32
33cpu_reset_prologue cortex_a76
34
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	/* x30 is clobbered by the bl below; preserve x29/x30 in the context */
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/* w2 carries the expected ESR_EL3 value into the helper */
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
73
#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
/*
 * Replacement vector table, installed over the generic one at reset (see
 * cortex_a76_reset_func) when either mitigation is compiled in. Entries
 * taken from the current EL forward straight to the generic handlers;
 * entries taken from a lower EL first apply the CVE-2022-23960 (BHB)
 * and/or the dynamic CVE-2018-3639 mitigations before branching on.
 */
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/* Sync entry: may be an SMCCC_ARCH_WORKAROUND_2 call (fast path) */
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/* Sync entry: may be an SMCCC_ARCH_WORKAROUND_2 call (fast path) */
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */
229
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will go through the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * Caller must pass value of esr_el3 to compare via x2.
	 * Save and restore these registers outside of this function from the
	 * context before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 * X2 populated outside this function with the SMC FID.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in caller's
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	/* Flags still reflect cmp x1, xzr: mov/adr do not touch them */
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	/* Clear the bit on a disable request (EQ), set it on enable */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	/* Not the fast-path SMC: resume normal exception handling */
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
303
/* Erratum entry and check function for SMCCC_ARCH_WORKAROUND_2 */
add_erratum_entry cortex_a76, ERRATUM(ARCH_WORKAROUND_2), WORKAROUND_CVE_2018_3639

check_erratum_chosen cortex_a76, ERRATUM(ARCH_WORKAROUND_2), WORKAROUND_CVE_2018_3639

/*
 * DSU erratum 798953: workaround and check are provided by the shared
 * implementations in dsu_macros.S.
 */
workaround_reset_start cortex_a76, ERRATUM(798953), ERRATA_DSU_798953
	errata_dsu_798953_wa_impl
workaround_reset_end cortex_a76, ERRATUM(798953)

check_erratum_custom_start cortex_a76, ERRATUM(798953)
	check_errata_dsu_798953_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(798953)

/* DSU erratum 936184: same pattern, using the shared DSU macros. */
workaround_reset_start cortex_a76, ERRATUM(936184), ERRATA_DSU_936184
	errata_dsu_936184_wa_impl
workaround_reset_end cortex_a76, ERRATUM(936184)

check_erratum_custom_start cortex_a76, ERRATUM(936184)
	check_errata_dsu_936184_impl
	ret
check_erratum_custom_end cortex_a76, ERRATUM(936184)

/* Erratum 1073348: disable static prediction. Applies to <= r1p0. */
workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1 ,CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)
332
/*
 * Erratum 1130799: set bit 59 of CPUACTLR2_EL1. Applies to <= r2p0.
 * sysreg_bit_set already performs the full read-modify-write (mrs/orr/msr);
 * no additional msr is needed (a stray one here previously re-wrote the
 * register via the macro's internal scratch register x1).
 */
workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)
339
/*
 * Erratum 1165347: set bits 0 and 15 of CPUACTLR2_EL1. Applies to <= r2p0.
 * Uses the standard sysreg_bit_set helper (the previous sysreg_lazy_*
 * macros are not defined anywhere in TF-A and would fail to assemble).
 */
workaround_reset_start cortex_a76, ERRATUM(1165347), ERRATA_A76_1165347
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, BIT(0)
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, BIT(15)
workaround_reset_end cortex_a76, ERRATUM(1165347)

check_erratum_ls cortex_a76, ERRATUM(1165347), CPU_REV(2, 0)
348
/* --------------------------------------------------------------
 * Errata Workaround for Cortex A76 Errata #1165522.
 * This applies only to revisions <= r3p0 of Cortex A76.
 * Due to the nature of the errata it is applied unconditionally
 * when built in, report it as applicable in this case
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

add_erratum_entry cortex_a76, ERRATUM(1165522), ERRATA_A76_1165522

/* Erratum 1207823: set bit 11 of CPUACTLR2_EL1. Applies to <= r2p0. */
workaround_reset_start cortex_a76, ERRATUM(1207823), ERRATA_A76_1207823
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, BIT(11)
workaround_reset_end cortex_a76, ERRATUM(1207823)

check_erratum_ls cortex_a76, ERRATUM(1207823), CPU_REV(2, 0)

/* Erratum 1220197: set the L2 write-streaming threshold bits. <= r2p0. */
workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

/* Erratum 1257314: set bit 10 of CPUACTLR3_EL1. Applies to <= r3p0. */
workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

/* Erratum 1262606: set bit 13 of CPUACTLR_EL1. Applies to <= r3p0. */
workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

/* Erratum 1262888: set bit 51 of CPUECTLR_EL1. Applies to <= r3p0. */
workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

/*
 * Erratum 1275112: set bit 13 of CPUACTLR_EL1 (same bit as 1262606).
 * Applies to <= r3p0.
 */
workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

/*
 * Erratum 1286807 has no CPU-local workaround in this file; when built in
 * it is reported as unconditionally applicable (otherwise <= r3p0) so that
 * generic code can handle it.
 */
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov x0, #ERRATA_APPLIES
#else
	cpu_rev_var_ls	CPU_REV(3, 0)
#endif
	ret
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

add_erratum_entry cortex_a76, ERRATUM(1286807), ERRATA_A76_1286807

/* Erratum 1791580: set bit 2 of CPUACTLR2_EL1. Applies to <= r4p0. */
workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

/*
 * Erratum 1868343: set bit 13 of CPUACTLR_EL1 (same bit as 1262606 and
 * 1275112). Applies to <= r4p0.
 */
workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)
425
workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
	/*
	 * Program the implementation-defined S3_6_C15_C8_{0..3} registers
	 * with the values given by the erratum workaround, selecting
	 * entries 3, 4 and 5 in turn via S3_6_C15_C8_0.
	 */
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

/*
 * Erratum 1946160 affects revisions r3p0 - r4p1 only. check_erratum_chosen
 * would report it as applicable on every revision whenever the build flag
 * is set; restrict the report to the affected revision range.
 */
check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)
456
/* Erratum 2356586: set bit 0 of CPUACTLR2_EL1. */
workaround_reset_start cortex_a76, ERRATUM(2356586), ERRATA_A76_2356586
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, BIT(0)
workaround_reset_end cortex_a76, ERRATUM(2356586)

check_erratum_chosen cortex_a76, ERRATUM(2356586), ERRATA_A76_2356586

/*
 * Erratum 2743102: applied at runtime from the power-down sequence (see
 * cortex_a76_core_pwr_dwn). Applies to <= r4p1.
 */
workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

/* Erratum 3888013: set bit 22 of CPUACTLR2_EL1. */
workaround_reset_start cortex_a76, ERRATUM(3888013), ERRATA_A76_3888013
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, BIT(22)
workaround_reset_end cortex_a76, ERRATUM(3888013)

check_erratum_chosen cortex_a76, ERRATUM(3888013), ERRATA_A76_3888013

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

/*
 * Clear the CVE-2018-3639 mitigation (load-pass-store disable) bit.
 * The SMCCC_ARCH_WORKAROUND_2 fast path stores this function's address in
 * the CPU context so that it is invoked on subsequent `el3_exit`s when the
 * caller requested the mitigation disabled.
 */
func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* erratum has no workaround in the cpu. Generic code must take care */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
488
/*
 * Reset handler: decides how to mitigate CVE-2018-3639 at runtime.
 * If the PE implements SSBS (read from ID_AA64PFR1_EL1), the dynamic
 * workaround and its private vector table are skipped.
 */
cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	/* Static mitigation selected but PE lacks SSBS: assert at boot */
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/* SSBS field non-zero: skip the dynamic mitigation */
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 * If the below vector table is used, skip overriding it again for
	 *  CVE_2022_23960 as both use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76
533
	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

	/* Erratum 2743102: issue a dsb before the isb below */
	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102

	isb
	ret
endfunc cortex_a76_core_pwr_dwn
550
	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	/* Only CPUECTLR_EL1 is reported, in x8 */
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

/* Register the reset and power-down handlers with the CPU ops framework */
declare_cpu_ops cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	cortex_a76_core_pwr_dwn
573