/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

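/*
 * ESR_EL3 values for an SMC #0 trap: EC (bits[31:26]) is 0x17 for an SMC
 * executed in AArch64 state and 0x13 for AArch32, the IL bit [25] is set,
 * and the ISS (imm16) is 0.
 */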
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during EL3
	 * execution. This is not required for the fast path above because it
	 * does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will take the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * The caller must pass the ESR_EL3 value to compare against in x2.
	 * x2-x3 are saved to and restored from the context outside this
	 * function, before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the expected ESR_EL3
	 * value for an SMC #0 from the caller's execution state.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1073348.
	 * This applies only to revisions <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
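	/* Applies to revisions <= r1p0. */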
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1130799.
	 * This applies only to revisions <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
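	/* Applies to revisions <= r2p0. */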
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1220197.
	 * This applies only to revisions <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
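	/* Applies to revisions <= r2p0. */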
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1257314.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
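	/* Applies to revisions <= r3p0. */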
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1262888.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
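	/* Applies to revisions <= r3p0. */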
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1286807.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum it is applied
	 * unconditionally when built in; it is reported as
	 * applicable in that case.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

	/* --------------------------------------------------
	 * Errata workaround for Cortex A76 Erratum #1791580.
	 * This applies to revisions <= r4p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1791580_wa
	/* Compare x0 against revision r4p0 */
	mov	x17, x30
	bl	check_errata_1791580
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1791580_wa

func check_errata_1791580
	/* Applies to everything <= r4p0. */
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1791580

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262606,
	 * #1275112, and #1868343. #1262606 and #1275112
	 * apply to revisions <= r3p0 and #1868343 applies to
	 * revisions <= r4p0.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */

func errata_a76_1262606_1275112_1868343_wa
	mov	x17, x30

/* Check for <= r3p0 cases and branch if check passes. */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	bl	check_errata_1262606
	cbnz	x0, 1f
#endif

/* Check for <= r4p0 cases and branch if check fails. */
#if ERRATA_A76_1868343
	bl	check_errata_1868343
	cbz	x0, 2f
#endif
1:
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
2:
	ret	x17
endfunc errata_a76_1262606_1275112_1868343_wa

func check_errata_1262606
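	/* Applies to revisions <= r3p0. */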
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
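	/* Applies to revisions <= r3p0. */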
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_1868343
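	/* Applies to revisions <= r4p0. */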
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1868343

	/* --------------------------------------------------
	 * Errata Workaround for A76 Erratum 1946160.
	 * This applies to revisions r3p0 - r4p1 of A76.
	 * It also exists in r0p0 - r2p0 but there is no fix
	 * in those revisions.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1946160_wa
	/* Compare x0 against revisions r3p0 - r4p1 */
	mov	x17, x30
	bl	check_errata_1946160
	cbz	x0, 1f

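	/*
	 * Apply the workaround by programming three instruction-patch entries
	 * through the IMPLEMENTATION DEFINED S3_6_C15_C8_n register group,
	 * using the values given in the erratum description. The exact role of
	 * each register is implementation defined and not architecturally
	 * named here.
	 */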
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	isb
1:
	ret	x17
endfunc errata_a76_1946160_wa

func check_errata_1946160
	/* Applies to revisions r3p0 - r4p1. */
	mov	x1, #0x30
	mov	x2, #0x41
	b	cpu_rev_var_range
endfunc check_errata_1946160

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* --------------------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1165522.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum it is applied
	 * unconditionally when built in; it is reported as
	 * applicable in that case.
	 * --------------------------------------------------------------
	 */
func check_errata_1165522
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1165522

func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
	ret
endfunc check_errata_cve_2022_23960

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
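	/*
	 * Preserve the return address in x19 and keep the CPU revision-variant
	 * value in x18 so it can be passed to each erratum workaround below.
	 */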
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
	mov	x0, x18
	bl	errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
	mov	x0, x18
	bl	errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
	mov	x0, x18
	bl	errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overridden to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs.
	 * Once this vector table has been installed, skip installing it again
	 * for CVE-2022-23960, as both workarounds share the same VBAR.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This is skipped if
	 * DYNAMIC_WORKAROUND_CVE_2018_3639 has already installed the same
	 * vectors above.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the check function of each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata ERRATA_A76_1791580, cortex_a76, 1791580
	report_errata ERRATA_A76_1165522, cortex_a76, 1165522
	report_errata ERRATA_A76_1868343, cortex_a76, 1868343
	report_errata ERRATA_A76_1946160, cortex_a76, 1946160
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184
	report_errata WORKAROUND_CVE_2022_23960, cortex_a76, cve_2022_23960

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

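	/*
	 * Register the Cortex-A76 CPU ops. The CVE-2018-3639 mitigation
	 * disable routine is exported through one of the "extra" function
	 * slots so that SMCCC_ARCH_WORKAROUND_2 handling can turn the
	 * mitigation off; the remaining extra slots are unused here.
	 */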
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn