/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

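/*
 * ESR_EL3 values for an `SMC #0` executed as a 32-bit instruction
 * (IL = 1): EC = 0x17 for an SMC from AArch64 state, EC = 0x13 for an
 * SMC from AArch32 state, with a zero immediate in the ISS field.
 */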
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * the x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
		/*
		 * Ensure that the SMC came from AArch64 or AArch32 state
		 * via `SMC #0` with W0 = SMCCC_ARCH_WORKAROUND_2.
		 *
		 * This sequence evaluates as:
		 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
		 * allowing use of a single branch operation.
		 */
		orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
		cmp	x0, x2
		mrs	x3, esr_el3
		mov_imm	w2, \_esr_el3_val
		ccmp	w2, w3, #0, eq
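		/*
		 * If the first comparison did not match, ccmp sets NZCV
		 * to #0 (Z clear) so the branch below is taken; otherwise
		 * the flags reflect the ESR_EL3 comparison.
		 */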
		/*
		 * Static predictor will predict a fall-through, optimizing
		 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
		 */
		bne	1f

		/*
		 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
		 * fast path.
		 */
		cmp	x1, xzr /* enable/disable check */

		/*
		 * When the calling context wants mitigation disabled,
		 * we program the mitigation disable function in the
		 * CPU context, which gets invoked on subsequent exits from
		 * EL3 via the `el3_exit` function. Otherwise NULL is
		 * programmed in the CPU context, which results in the caller
		 * inheriting the EL3 mitigation state (enabled) on subsequent
		 * `el3_exit`.
		 */
		mov	x0, xzr
		adr	x1, cortex_a76_disable_wa_cve_2018_3639
		csel	x1, x1, x0, eq
		str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

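		/*
		 * Compute both the mitigated and unmitigated CPUACTLR2
		 * values and select the requested one: EQ (W1 == 0) means
		 * the caller asked for the mitigation to be disabled.
		 */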
		mrs	x2, CORTEX_A76_CPUACTLR2_EL1
		orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		csel	x3, x3, x1, eq
		msr	CORTEX_A76_CPUACTLR2_EL1, x3
		eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable the Spectre variant 4 (CVE-2018-3639) mitigation
	 * during EL3 execution. This is not required for the fast path
	 * above because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

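/*
 * Each vector entry below is limited to 32 instructions (0x80 bytes), so
 * the workaround macro plus the branch to the generic handler must fit in
 * that budget; `end_vector_entry` enforces the limit at build time.
 */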
vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348
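
/*
 * Note: `cpu_rev_var_ls` returns ERRATA_APPLIES in x0 when the
 * revision-variant value passed in x0 is less than or equal to the value
 * in x1, and ERRATA_NOT_APPLIES otherwise. The comparison value encodes
 * the variant in bits [7:4] and the revision in bits [3:0], so 0x10 above
 * corresponds to r1p0.
 */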

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
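	/*
	 * Set bit 59 of CPUACTLR2_EL1; the raw value is used here,
	 * presumably because this revision of the header does not name
	 * the bit.
	 */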
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1275112
	 * and Errata #1262606.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1275112_1262606_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	/*
	 * Since both errata #1275112 and #1262606 have the same check, we
	 * can invoke either one of them here.
	 */
	bl	check_errata_1275112
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1275112_1262606_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1286807.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Due to the nature of the errata, it is applied
	 * unconditionally when built in; report it as applicable
	 * in that case.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0
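
	/*
	 * x18 holds the CPU revision-variant for the errata workaround
	 * calls below; x19 preserves the return address across them.
	 */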

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	mov	x0, x18
	bl	errata_a76_1275112_1262606_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
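	/*
	 * Without the dynamic workaround, SSBS is the only mitigation for
	 * CVE-2018-3639, so assert that the PE implements it.
	 */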
	cmp	x0, #0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
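	/*
	 * If SSBS is implemented, skip programming the static mitigation
	 * and the vector table override below.
	 */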
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
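	/* Once set, the core completes power down upon its next WFI. */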
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking function of each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", "" /* The trailing empty string terminates the list */

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

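	/*
	 * Register the CPU ops. `declare_cpu_ops_wa` takes two extra hooks
	 * beyond the standard ops: a CVE-2017-5715 workaround test function
	 * (unused here, hence CPU_NO_EXTRA1_FUNC) and the CVE-2018-3639
	 * mitigation disable function.
	 */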
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn