xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision 921081049ec37c285c7cac8b845c8a5e829b68c4)
/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
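
/*
 * ESR_EL3 values corresponding to an SMC #0 trapped from a lower EL:
 * EC (bits[31:26]) is 0x17 for an AArch64 SMC and 0x13 for an AArch32 SMC,
 * the IL bit (bit[25]) is set, and the ISS field is zero.
 */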

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during EL3
	 * execution. This is not required for the fast path above because it
	 * does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

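	/*
	 * These vectors replace the default runtime vectors while the
	 * dynamic CVE-2018-3639 mitigation is in use: entries taken from
	 * lower ELs first apply the mitigation via the macro above and then
	 * branch to the corresponding default handler, while current-EL
	 * entries branch to the default handlers directly.
	 */
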
	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32

	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 take the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * The caller must pass the expected ESR_EL3 value via x2.
	 * The registers clobbered here are saved and restored from the
	 * context outside of this function, before jumping to the main
	 * runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the ESR_EL3 value to
	 * compare against.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

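	/*
	 * The flags from the `cmp x1, xzr` above are still live here: EQ
	 * (x1 == 0, mitigation disable requested) selects the CPUACTLR2_EL1
	 * value with DISABLE_LOAD_PASS_STORE cleared, otherwise the bit is
	 * kept set so the mitigation stays enabled.
	 */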
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348
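
	/*
	 * cpu_rev_var_ls returns ERRATA_APPLIES when the variant/revision
	 * value in x0 (variant in bits[7:4], revision in bits[3:0]) is less
	 * than or equal to the threshold in x1; #0x10 above therefore
	 * corresponds to r1p0.
	 */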

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1286807.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum, the workaround is applied
	 * unconditionally when built in; report it as applicable in that case.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

	/* --------------------------------------------------
	 * Errata workaround for Cortex A76 Errata #1791580.
	 * This applies to revisions <= r4p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1791580_wa
	/* Compare x0 against revision r4p0 */
	mov	x17, x30
	bl	check_errata_1791580
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1791580_wa

func check_errata_1791580
	/* Applies to everything <= r4p0. */
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1791580

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262606,
	 * #1275112, and #1868343. #1262606 and #1275112
	 * apply to revisions <= r3p0 and #1868343 applies to
	 * revisions <= r4p0.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */

func errata_a76_1262606_1275112_1868343_wa
	mov	x17, x30

/* Check for <= r3p0 cases and branch if check passes. */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	bl	check_errata_1262606
	cbnz	x0, 1f
#endif

/* Check for <= r4p0 cases and branch if check fails. */
#if ERRATA_A76_1868343
	bl	check_errata_1868343
	cbz	x0, 2f
#endif
1:
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
2:
	ret	x17
endfunc errata_a76_1262606_1275112_1868343_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_1868343
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1868343

	/* --------------------------------------------------
	 * Errata Workaround for A76 Erratum 1946160.
	 * This applies to revisions r3p0 - r4p1 of A76.
	 * It also exists in r0p0 - r2p0 but there is no fix
	 * in those revisions.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1946160_wa
	/* Compare x0 against revisions r3p0 - r4p1 */
	mov	x17, x30
	bl	check_errata_1946160
	cbz	x0, 1f

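	/*
	 * Program the IMPLEMENTATION DEFINED registers S3_6_C15_C8_n with
	 * the values given in the erratum workaround description. Each of
	 * the three groups below first selects an entry via S3_6_C15_C8_0
	 * and then writes the associated data values.
	 */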
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	isb
1:
	ret	x17
endfunc errata_a76_1946160_wa

func check_errata_1946160
	/* Applies to revisions r3p0 - r4p1. */
	mov	x1, #0x30
	mov	x2, #0x41
	b	cpu_rev_var_range
endfunc check_errata_1946160

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

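	/*
	 * Hook installed in the CPU context by the fast path above (and
	 * registered with declare_cpu_ops_wa below); it dynamically disables
	 * the CVE-2018-3639 mitigation by clearing DISABLE_LOAD_PASS_STORE.
	 */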
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* --------------------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1165522.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum, the workaround is applied
	 * unconditionally when built in; report it as applicable in that case.
	 * --------------------------------------------------------------
	 */
func check_errata_1165522
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1165522

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

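	/* x18 holds the CPU revision/variant for the erratum workarounds below. */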
#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
	mov	x0, x18
	bl	errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
	mov	x0, x18
	bl	errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
	mov	x0, x18
	bl	errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking function of each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata ERRATA_A76_1791580, cortex_a76, 1791580
	report_errata ERRATA_A76_1165522, cortex_a76, 1165522
	report_errata ERRATA_A76_1868343, cortex_a76, 1868343
	report_errata ERRATA_A76_1946160, cortex_a76, 1946160
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

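	/*
	 * Register the Cortex-A76 CPU ops. The declare_cpu_ops_wa variant
	 * additionally takes the per-CPU workaround hooks: no extra1
	 * function is provided (CPU_NO_EXTRA1_FUNC), and
	 * cortex_a76_disable_wa_cve_2018_3639 is used to dynamically
	 * disable the CVE-2018-3639 mitigation.
	 */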
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn