/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
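/*
 * These encode ESR_EL3 for an SMC #0 taken from a lower EL: EC is 0x17
 * (SMC from AArch64) or 0x13 (SMC from AArch32), the IL bit is set and
 * the ISS field (the SMC immediate) is zero.
 */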

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * the x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar
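	/*
	 * This table wraps the generic runtime exception vectors: each
	 * lower-EL entry first applies the CVE mitigations that are built
	 * in, then branches to the corresponding entry of the default
	 * vector table.
	 */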

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will go through the fast path and return
	 * early.
	 *
	 * In the fast path the x0-x3 registers do not need to be restored
	 * as the calling context will have saved them.
	 *
	 * The caller must pass the expected ESR_EL3 value in x2. x2-x3 are
	 * saved to and restored from the context outside of this function,
	 * before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure that the SMC comes from A64/A32 state with immediate #0
	 * and with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation. The expected ESR_EL3
	 * value is populated in X2 outside this function.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * The static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants the mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

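	/*
	 * Set or clear the load-pass-store chicken bit using the same
	 * enable/disable check: EQ (x1 == 0, a disable request) selects
	 * the value with the bit cleared, otherwise the bit is set.
	 */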
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1073348.
	 * This applies only to revisions <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1130799.
	 * This applies only to revisions <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1220197.
	 * This applies only to revisions <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1257314.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1262888.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1286807.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum the workaround is
	 * applied unconditionally when built in; report it as
	 * applicable in this case.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

	/* --------------------------------------------------
	 * Errata workaround for Cortex A76 Erratum #1791580.
	 * This applies to revisions <= r4p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1791580_wa
	/* Compare x0 against revision r4p0 */
	mov	x17, x30
	bl	check_errata_1791580
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1791580_wa

func check_errata_1791580
	/* Applies to everything <= r4p0. */
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1791580

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262606,
	 * #1275112, and #1868343. #1262606 and #1275112
	 * apply to revisions <= r3p0 and #1868343 applies
	 * to revisions <= r4p0.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */

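	/*
	 * All three errata are mitigated by setting the same chicken bit
	 * (bit 13 of CPUACTLR_EL1), which is why a single combined
	 * workaround function is used.
	 */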
func errata_a76_1262606_1275112_1868343_wa
	mov	x17, x30

/* Check for <= r3p0 cases and branch if check passes. */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	bl	check_errata_1262606
	cbnz	x0, 1f
#endif

/* Check for <= r4p0 cases and branch if check fails. */
#if ERRATA_A76_1868343
	bl	check_errata_1868343
	cbz	x0, 2f
#endif
1:
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
2:
	ret	x17
endfunc errata_a76_1262606_1275112_1868343_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_1868343
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1868343

/* --------------------------------------------------
 * Errata Workaround for A76 Erratum 1946160.
 * This applies to revisions r3p0 - r4p1 of A76.
 * It also exists in r0p0 - r2p0 but there is no fix
 * in those revisions.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1946160_wa
	/* Compare x0 against revisions r3p0 - r4p1 */
	mov	x17, x30
	bl	check_errata_1946160
	cbz	x0, 1f

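	/*
	 * The writes below program three entries of the CPU's
	 * implementation defined instruction patching registers with the
	 * values Arm recommends for this erratum. Going by the names used
	 * elsewhere in TF-A, S3_6_C15_C8_0 is CPUPSELR_EL3 (entry select),
	 * S3_6_C15_C8_1 is CPUPCR_EL3, S3_6_C15_C8_2 is CPUPOR_EL3 and
	 * S3_6_C15_C8_3 is CPUPMR_EL3.
	 */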
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	isb
1:
	ret	x17
endfunc errata_a76_1946160_wa

func check_errata_1946160
	/* Applies to revisions r3p0 - r4p1. */
	mov	x1, #0x30
	mov	x2, #0x41
	b	cpu_rev_var_range
endfunc check_errata_1946160

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

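	/*
	 * Disable hook for the CVE-2018-3639 dynamic mitigation. The fast
	 * path above programs this function into the CPU context so that
	 * `el3_exit` invokes it when the caller has requested the
	 * mitigation to be disabled.
	 */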
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* --------------------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1165522.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum the workaround is applied
	 * unconditionally when built in; report it as applicable in
	 * this case.
	 * --------------------------------------------------------------
	 */
func check_errata_1165522
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1165522

func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
	ret
endfunc check_errata_cve_2022_23960

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
	mov	x0, x18
	bl	errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
	mov	x0, x18
	bl	errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
	mov	x0, x18
	bl	errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	/*
	 * Without the dynamic workaround the mitigation relies on the PE
	 * implementing SSBS, so assert that it does.
	 */
	cmp	x0, #0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overridden to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs.
	 * Once this vector table is installed, skip installing it again for
	 * CVE-2022-23960, as both workarounds use the same vbar.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply the errata
	 * mitigation on exception entry from lower ELs. This is bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has already overridden the
	 * vectors.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking function of each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata ERRATA_A76_1791580, cortex_a76, 1791580
	report_errata ERRATA_A76_1165522, cortex_a76, 1165522
	report_errata ERRATA_A76_1868343, cortex_a76, 1868343
	report_errata ERRATA_A76_1946160, cortex_a76, 1946160
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184
	report_errata WORKAROUND_CVE_2022_23960, cortex_a76, cve_2022_23960

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides Cortex-A76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to a
	 * list of register names in ascii and
	 * x8 - x15 having values of the registers to be
	 * reported. The list is terminated by an empty
	 * string.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

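	/*
	 * Register the CPU ops. Note that the extra1 and extra3 workaround
	 * slots (used in this TF-A tree for the CVE-2017-5715 and
	 * CVE-2022-23960 handlers respectively) are left empty; only the
	 * CVE-2018-3639 disable hook is provided.
	 */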
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn