/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

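/*
 * ESR_EL3 values corresponding to an SMC #0 trapped from AArch64 and
 * AArch32 states respectively.
 */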
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
23
24#if DYNAMIC_WORKAROUND_CVE_2018_3639
25	/*
26	 * This macro applies the mitigation for CVE-2018-3639.
27	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
28	 * SMC calls from a lower EL running in AArch32 or AArch64
29	 * will go through the fast and return early.
30	 *
31	 * The macro saves x2-x3 to the context. In the fast path
32	 * x0-x3 registers do not need to be restored as the calling
33	 * context will have saved them.
34	 */
35	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
		/*
		 * Ensure SMC is coming from A64/A32 state on #0
		 * with W0 = SMCCC_ARCH_WORKAROUND_2
		 *
		 * This sequence evaluates as:
		 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
		 * allowing use of a single branch operation
		 */
		orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
		cmp	x0, x2
		mrs	x3, esr_el3
		mov_imm	w2, \_esr_el3_val
		ccmp	w2, w3, #0, eq
		/*
		 * Static predictor will predict a fall-through, optimizing
		 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
		 */
		bne	1f

		/*
		 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
		 * fast path.
		 */
		cmp	x1, xzr /* enable/disable check */

		/*
		 * When the calling context wants mitigation disabled,
		 * we program the mitigation disable function in the
		 * CPU context, which gets invoked on subsequent exits from
		 * EL3 via the `el3_exit` function. Otherwise NULL is
		 * programmed in the CPU context, which results in the caller
		 * inheriting the EL3 mitigation state (enabled) on subsequent
		 * `el3_exit`.
		 */
		mov	x0, xzr
		adr	x1, cortex_a76_disable_wa_cve_2018_3639
		csel	x1, x1, x0, eq
		str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

		mrs	x2, CORTEX_A76_CPUACTLR2_EL1
		orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		csel	x3, x3, x1, eq
		msr	CORTEX_A76_CPUACTLR2_EL1, x3
		eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable the CVE-2018-3639 (Spectre variant 4) mitigation
	 * during EL3 execution. This is not required for the fast path
	 * above because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar
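	/*
	 * Entries for exceptions taken from lower ELs apply the
	 * CVE-2018-3639 mitigation before branching to the default
	 * runtime exception handlers.
	 */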

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex-A76 Erratum #1073348.
	 * This applies only to revision <= r1p0 of Cortex-A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
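	/* Erratum 1073348 applies to revisions <= r1p0 (value 0x10) */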
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex-A76 Erratum #1130799.
	 * This applies only to revision <= r2p0 of Cortex-A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
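	/* Erratum 1130799 applies to revisions <= r2p0 (value 0x20) */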
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex-A76 Erratum #1220197.
	 * This applies only to revision <= r2p0 of Cortex-A76.
	 * Inputs:
	 * x0: variant[7:4] and revision[3:0] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
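	/* Erratum 1220197 applies to revisions <= r2p0 (value 0x20) */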
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

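	/* --------------------------------------------------
	 * Disable the CVE-2018-3639 mitigation by clearing
	 * the load-pass-store disable bit in CPUACTLR2_EL1.
	 * --------------------------------------------------
	 */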
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
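	/* Without the dynamic workaround, SSBS must be implemented */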
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the check function of each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""
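	/* The trailing empty string terminates the register name list */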

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

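	/* ---------------------------------------------
	 * Register the Cortex-A76 CPU ops, including the
	 * CVE-2018-3639 mitigation disable helper.
	 * ---------------------------------------------
	 */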
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn