xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision e6e1d0ac162027334471e1eb9f1e0ce46065db6a)
/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

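/*
 * ESR_EL3 values for an SMC #0 trapped from a lower EL: EC is 0x17
 * (SMC from AArch64 state) or 0x13 (SMC from AArch32 state), the IL
 * bit is set and the ISS (the SMC immediate) is zero.
 */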
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path the
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
		/*
		 * Ensure the SMC came from A64/A32 state with immediate #0
		 * and W0 = SMCCC_ARCH_WORKAROUND_2.
		 *
		 * This sequence evaluates as:
		 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
		 * allowing use of a single branch operation. If the first
		 * compare does not set EQ, the `ccmp` writes its immediate
		 * (#0) to NZCV, which forces the NE result.
		 */
		orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
		cmp	x0, x2
		mrs	x3, esr_el3
		mov_imm	w2, \_esr_el3_val
		ccmp	w2, w3, #0, eq
		/*
		 * The static predictor will predict a fall-through,
		 * optimizing the `SMCCC_ARCH_WORKAROUND_2` fast path.
		 */
		bne	1f

		/*
		 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
		 * fast path.
		 */
		cmp	x1, xzr /* enable/disable check */

		/*
		 * When the calling context wants the mitigation disabled,
		 * program the mitigation disable function in the CPU
		 * context, which gets invoked on subsequent exits from
		 * EL3 via the `el3_exit` function. Otherwise NULL is
		 * programmed in the CPU context, which results in the
		 * caller inheriting the EL3 mitigation state (enabled) on
		 * subsequent `el3_exit`.
		 */
		mov	x0, xzr
		adr	x1, cortex_a76_disable_wa_cve_2018_3639
		csel	x1, x1, x0, eq
		str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

		/* Likewise, clear the mitigation bit only on a disable request */
		mrs	x2, CORTEX_A76_CPUACTLR2_EL1
		orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		csel	x3, x3, x1, eq
		msr	CORTEX_A76_CPUACTLR2_EL1, x3
		eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable the Spectre variant 4 mitigation during EL3
	 * execution. This is not required for the fast path above
	 * because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
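
	/*
	 * For reference, a hypothetical lower-EL caller of the fast path
	 * above would issue (caller-side sketch, not part of this file):
	 *
	 *	movz	w0, #0x7fff
	 *	movk	w0, #0x8000, lsl #16	// w0 = SMCCC_ARCH_WORKAROUND_2
	 *	mov	w1, #1			// 1 = enable, 0 = disable
	 *	smc	#0
	 */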

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

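	/*
	 * Each vector entry below must fit in 32 instructions; the
	 * end_vector_entry macro enforces that bound.
	 */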
	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

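/*
 * Each check_errata_* helper below receives the revision-variant value
 * in x0 and tail-calls cpu_rev_var_ls with the last affected revision
 * in x1, encoded as rNpM -> 0xNM (so #0x10 means r1p0). cpu_rev_var_ls
 * returns ERRATA_APPLIES when x0 <= x1.
 */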
func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1275112
	 * and Errata #1262606.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1275112_1262606_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	/*
	 * Since both errata #1275112 and #1262606 have the same revision
	 * check, invoking either one of them suffices here.
	 */
	bl	check_errata_1275112
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1275112_1262606_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

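/*
 * Clear the CVE-2018-3639 mitigation. This routine is registered as the
 * workaround-disable hook in the CPU ops below; the fast path above also
 * stores its address in the context for `el3_exit` to invoke.
 */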
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	mov	x0, x18
	bl	errata_a76_1275112_1262606_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
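
	/*
	 * If the dynamic mitigation is not compiled in, SSBS is the only
	 * remaining protection, so assert that the PE implements it.
	 */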
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, #0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overridden to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

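	/*
	 * The errata_dsu_* helpers below are shared DynamIQ Shared Unit
	 * (DSU) workarounds, common to all cores attached to a DSU.
	 */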
#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking function for each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""	/* The empty string terminates the list */

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

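/*
 * declare_cpu_ops_wa additionally registers the CVE-2017-5715 check
 * hook (none for this CPU) and the CVE-2018-3639 disable hook defined
 * above.
 */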
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn
