/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
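
/*
 * ESR_EL3 values for an SMC #0 trapped from a lower EL: EC is 0x17 (SMC from
 * AArch64) or 0x13 (SMC from AArch32), the IL bit is set and the ISS (the SMC
 * immediate) is 0. The fast path below compares ESR_EL3 against these values
 * to confirm the exception really is an SMC #0 before treating it as an
 * SMCCC_ARCH_WORKAROUND_2 call.
 */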

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
		/*
		 * Ensure SMC is coming from A64/A32 state on #0
		 * with W0 = SMCCC_ARCH_WORKAROUND_2
		 *
		 * This sequence evaluates as:
		 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
		 * allowing use of a single branch operation
		 */
		orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
		cmp	x0, x2
		mrs	x3, esr_el3
		mov_imm	w2, \_esr_el3_val
		ccmp	w2, w3, #0, eq
		/*
		 * Static predictor will predict a fall-through, optimizing
		 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
		 */
		bne	1f

		/*
		 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
		 * fast path.
		 */
		cmp	x1, xzr /* enable/disable check */

		/*
		 * When the calling context wants mitigation disabled,
		 * we program the mitigation disable function in the
		 * CPU context, which gets invoked on subsequent exits from
		 * EL3 via the `el3_exit` function. Otherwise NULL is
		 * programmed in the CPU context, which results in the caller
		 * inheriting the EL3 mitigation state (enabled) on subsequent
		 * `el3_exit`.
		 */
		mov	x0, xzr
		adr	x1, cortex_a76_disable_wa_cve_2018_3639
		csel	x1, x1, x0, eq
		str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

		mrs	x2, CORTEX_A76_CPUACTLR2_EL1
		orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		csel	x3, x3, x1, eq
		msr	CORTEX_A76_CPUACTLR2_EL1, x3
		eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
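
	/*
	 * For reference, the macro above behaves roughly like the following
	 * pseudo-C (the context field and bit-field names here are
	 * illustrative only, not the actual structure layout):
	 *
	 *   if (is_sync_exception && w0 == SMCCC_ARCH_WORKAROUND_2 &&
	 *       esr_el3 == esr_el3_val) {
	 *           // Fast path: x1 == 0 asks for the mitigation to be
	 *           // disabled on return to the lower EL.
	 *           ctx->cve_2018_3639_disable = (x1 == 0) ?
	 *                   cortex_a76_disable_wa_cve_2018_3639 : NULL;
	 *           CPUACTLR2_EL1[DISABLE_LOAD_PASS_STORE] = (x1 == 0) ? 0 : 1;
	 *           eret();
	 *   }
	 *   // Slow path: keep the mitigation enabled while executing in EL3.
	 *   CPUACTLR2_EL1[DISABLE_LOAD_PASS_STORE] = 1;
	 */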

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348
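
/*
 * Note on the revision checks: cpu_get_rev_var packs MIDR.Variant into
 * bits [7:4] and MIDR.Revision into bits [3:0] of x0, so 0x10 means r1p0,
 * 0x20 means r2p0 and 0x30 means r3p0. cpu_rev_var_ls returns ERRATA_APPLIES
 * when that packed value is lower than or the same as the limit passed in x1,
 * which is how each check_errata_* function below bounds its erratum to the
 * affected revisions.
 */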

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1275112
	 * and Errata #1262606.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1275112_1262606_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	/*
	 * Since both errata #1275112 and #1262606 have the same check, we can
	 * invoke any one of them for the check here.
	 */
	bl	check_errata_1275112
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1275112_1262606_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1286807.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Due to the nature of the errata it is applied unconditionally
	 * when built in; in that case, report it as applicable.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639
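
/*
 * This disable function serves two purposes: it is registered as the
 * CVE-2018-3639 disable hook in the cpu_ops structure (see the
 * declare_cpu_ops_wa entry at the end of this file), and its address is the
 * pointer that the fast path above stores in the per-context
 * CTX_CVE_2018_3639_DISABLE slot so that `el3_exit` can turn the mitigation
 * back off before returning to a caller that requested it disabled.
 */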

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	mov	x0, x18
	bl	errata_a76_1275112_1262606_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
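	/*
	 * x0 now holds the ID_AA64PFR1_EL1.SSBS field. A non-zero value means
	 * the PE implements hardware Speculative Store Bypass Safe controls,
	 * so the CPUACTLR2-based software mitigation is not needed.
	 */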
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */
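
	/*
	 * The workarounds below target DSU (DynamIQ Shared Unit) errata rather
	 * than the core itself; the helper functions are shared across CPUs
	 * and are expected to come from dsu_helpers.S.
	 */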

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
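	/*
	 * With CORE_PWRDN_EN set, the core is expected to complete the power
	 * down the next time it enters WFI in the power-down sequence.
	 */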
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""
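	/* The trailing empty string marks the end of the register name list. */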

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

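/*
 * Register the cpu_ops for this CPU. declare_cpu_ops_wa additionally takes
 * the security workaround hooks: the CVE-2017-5715 check slot is left empty
 * here (CPU_NO_EXTRA1_FUNC), while cortex_a76_disable_wa_cve_2018_3639 is
 * installed as the CVE-2018-3639 disable hook.
 */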
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn