xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a76.S (revision fd7b287cbe9147ca9e07dd9f30c49c58bbdd92a8)
/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

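/*
 * ESR_EL3 values for an SMC #0 trapped from AArch64 (EC = 0x17) and from
 * AArch32 (EC = 0x13), with the IL bit set and a zero ISS.
 */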
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * take the fast path and return early.
	 *
	 * _is_sync_exception selects the synchronous-exception variant and
	 * _esr_el3_val is the ESR_EL3 value that identifies an SMC #0 for
	 * the entry's execution state.
	 *
	 * The macro saves x2-x3 to the context. In the fast path the
	 * x0-x3 registers do not need to be restored, as the calling
	 * context will have saved them.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
		/*
		 * Ensure the SMC was an SMC #0 issued from AArch64/AArch32
		 * state with W0 = SMCCC_ARCH_WORKAROUND_2
		 *
		 * This sequence evaluates as:
		 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
		 * allowing use of a single branch operation
		 */
		orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
		cmp	x0, x2
		mrs	x3, esr_el3
		mov_imm	w2, \_esr_el3_val
		ccmp	w2, w3, #0, eq
		/*
		 * Static predictor will predict a fall-through, optimizing
		 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
		 */
		bne	1f

		/*
		 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
		 * fast path.
		 */
		cmp	x1, xzr /* enable/disable check */

		/*
		 * When the calling context wants the mitigation disabled,
		 * we program the mitigation disable function in the
		 * CPU context, which gets invoked on subsequent exits from
		 * EL3 via the `el3_exit` function. Otherwise NULL is
		 * programmed in the CPU context, which results in the caller
		 * inheriting the EL3 mitigation state (enabled) on subsequent
		 * `el3_exit`.
		 */
		mov	x0, xzr
		adr	x1, cortex_a76_disable_wa_cve_2018_3639
		csel	x1, x1, x0, eq
		str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

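		/*
		 * Toggle the mitigation for the caller: EQ (still valid from
		 * the enable/disable check above) means a disable request, so
		 * clear CPUACTLR2_EL1.DISABLE_LOAD_PASS_STORE; otherwise set
		 * it. Then return directly to the lower EL.
		 */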
		mrs	x2, CORTEX_A76_CPUACTLR2_EL1
		orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		csel	x3, x3, x1, eq
		msr	CORTEX_A76_CPUACTLR2_EL1, x3
		eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable the CVE-2018-3639 (Spectre variant 4) mitigation
	 * during EL3 execution. This is not required for the fast path above
	 * because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
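	/*
	 * cpu_rev_var_ls returns ERRATA_APPLIES when the revision-variant in
	 * x0 is less than or equal to x1; 0x10 encodes r1p0 (variant in
	 * bits [7:4], revision in bits [3:0]).
	 */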
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Erratum #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
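	/* Preserve the return address and keep the CPU revision-variant in x18 */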
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
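	/*
	 * Without the dynamic workaround built in, the mitigation depends on
	 * the PE implementing SSBS, so catch a misconfigured build here when
	 * assertions are enabled.
	 */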
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, #0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
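	/* Skip the dynamic mitigation setup when SSBS is implemented */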
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the check function of each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""
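	/* The trailing empty string terminates the register name list */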

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

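	/*
	 * Register the CPU ops. No CPU-specific hook is needed for
	 * SMCCC_ARCH_WORKAROUND_1 (CPU_NO_EXTRA1_FUNC), while
	 * cortex_a76_disable_wa_cve_2018_3639 backs the dynamic disable path
	 * of SMCCC_ARCH_WORKAROUND_2.
	 */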
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn