/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <cpuamu.h>
#include <cpu_macros.S>
#include <neoverse_n1.h>
#include "wa_cve_2022_23960_bhb_vector.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Neoverse N1 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Neoverse-N1 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

	.global neoverse_n1_errata_ic_trap_handler

#if WORKAROUND_CVE_2022_23960
	wa_cve_2022_23960_bhb_vector_table NEOVERSE_N1_BHB_LOOP_COUNT, neoverse_n1
#endif /* WORKAROUND_CVE_2022_23960 */

/*
 * ERRATA_DSU_936184:
 * The erratum is implemented in dsu_helpers.S and applies to Neoverse N1.
 * Create symbolic aliases for the existing check and workaround functions
 * so that they get registered under the Errata Framework.
 */
.equ check_erratum_neoverse_n1_936184, check_errata_dsu_936184
.equ erratum_neoverse_n1_936184_wa, errata_dsu_936184_wa
add_erratum_entry neoverse_n1, ERRATUM(936184), ERRATA_DSU_936184, APPLY_AT_RESET

workaround_reset_start neoverse_n1, ERRATUM(1043202), ERRATA_N1_1043202
	/* Apply instruction patching sequence */
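	/*
	 * CPUPSELR_EL3, CPUPOR_EL3, CPUPMR_EL3 and CPUPCR_EL3 are
	 * IMPLEMENTATION DEFINED patch registers; the values written below
	 * are the ones prescribed by the erratum notice.
	 */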
	ldr	x0, =0x0
	msr	CPUPSELR_EL3, x0
	ldr	x0, =0xF3BF8F2F
	msr	CPUPOR_EL3, x0
	ldr	x0, =0xFFFFFFFF
	msr	CPUPMR_EL3, x0
	ldr	x0, =0x800200071
	msr	CPUPCR_EL3, x0
workaround_reset_end neoverse_n1, ERRATUM(1043202)

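/*
 * check_erratum_ls marks an erratum as applying to all revisions up to and
 * including the one given, here r1p0.
 */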
check_erratum_ls neoverse_n1, ERRATUM(1043202), CPU_REV(1, 0)

workaround_reset_start neoverse_n1, ERRATUM(1073348), ERRATA_N1_1073348
	mrs	x1, NEOVERSE_N1_CPUACTLR_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR_EL1_BIT_6
	msr	NEOVERSE_N1_CPUACTLR_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1073348)

check_erratum_ls neoverse_n1, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start neoverse_n1, ERRATUM(1130799), ERRATA_N1_1130799
	mrs	x1, NEOVERSE_N1_CPUACTLR2_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR2_EL1_BIT_59
	msr	NEOVERSE_N1_CPUACTLR2_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1130799)

check_erratum_ls neoverse_n1, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start neoverse_n1, ERRATUM(1165347), ERRATA_N1_1165347
	mrs	x1, NEOVERSE_N1_CPUACTLR2_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR2_EL1_BIT_0
	orr	x1, x1, NEOVERSE_N1_CPUACTLR2_EL1_BIT_15
	msr	NEOVERSE_N1_CPUACTLR2_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1165347)

check_erratum_ls neoverse_n1, ERRATUM(1165347), CPU_REV(2, 0)

workaround_reset_start neoverse_n1, ERRATUM(1207823), ERRATA_N1_1207823
	mrs	x1, NEOVERSE_N1_CPUACTLR2_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR2_EL1_BIT_11
	msr	NEOVERSE_N1_CPUACTLR2_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1207823)

check_erratum_ls neoverse_n1, ERRATUM(1207823), CPU_REV(2, 0)

workaround_reset_start neoverse_n1, ERRATUM(1220197), ERRATA_N1_1220197
	mrs	x1, NEOVERSE_N1_CPUECTLR_EL1
	orr	x1, x1, NEOVERSE_N1_WS_THR_L2_MASK
	msr	NEOVERSE_N1_CPUECTLR_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1220197)

check_erratum_ls neoverse_n1, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start neoverse_n1, ERRATUM(1257314), ERRATA_N1_1257314
	mrs	x1, NEOVERSE_N1_CPUACTLR3_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR3_EL1_BIT_10
	msr	NEOVERSE_N1_CPUACTLR3_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1257314)

check_erratum_ls neoverse_n1, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start neoverse_n1, ERRATUM(1262606), ERRATA_N1_1262606
	mrs	x1, NEOVERSE_N1_CPUACTLR_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR_EL1_BIT_13
	msr	NEOVERSE_N1_CPUACTLR_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1262606)

check_erratum_ls neoverse_n1, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start neoverse_n1, ERRATUM(1262888), ERRATA_N1_1262888
	mrs	x1, NEOVERSE_N1_CPUECTLR_EL1
	orr	x1, x1, NEOVERSE_N1_CPUECTLR_EL1_MM_TLBPF_DIS_BIT
	msr	NEOVERSE_N1_CPUECTLR_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1262888)

check_erratum_ls neoverse_n1, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start neoverse_n1, ERRATUM(1275112), ERRATA_N1_1275112
	mrs	x1, NEOVERSE_N1_CPUACTLR_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR_EL1_BIT_13
	msr	NEOVERSE_N1_CPUACTLR_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1275112)

check_erratum_ls neoverse_n1, ERRATUM(1275112), CPU_REV(3, 0)

workaround_reset_start neoverse_n1, ERRATUM(1315703), ERRATA_N1_1315703
	mrs	x0, NEOVERSE_N1_CPUACTLR2_EL1
	orr	x0, x0, #NEOVERSE_N1_CPUACTLR2_EL1_BIT_16
	msr	NEOVERSE_N1_CPUACTLR2_EL1, x0
workaround_reset_end neoverse_n1, ERRATUM(1315703)

check_erratum_ls neoverse_n1, ERRATUM(1315703), CPU_REV(3, 0)

workaround_reset_start neoverse_n1, ERRATUM(1542419), ERRATA_N1_1542419
	/* Apply instruction patching sequence */
	ldr	x0, =0x0
	msr	CPUPSELR_EL3, x0
	ldr	x0, =0xEE670D35
	msr	CPUPOR_EL3, x0
	ldr	x0, =0xFFFF0FFF
	msr	CPUPMR_EL3, x0
	ldr	x0, =0x08000020007D
	msr	CPUPCR_EL3, x0
	isb
workaround_reset_end neoverse_n1, ERRATUM(1542419)

check_erratum_range neoverse_n1, ERRATUM(1542419), CPU_REV(3, 0), CPU_REV(4, 0)

workaround_reset_start neoverse_n1, ERRATUM(1868343), ERRATA_N1_1868343
	mrs	x1, NEOVERSE_N1_CPUACTLR_EL1
	orr	x1, x1, NEOVERSE_N1_CPUACTLR_EL1_BIT_13
	msr	NEOVERSE_N1_CPUACTLR_EL1, x1
workaround_reset_end neoverse_n1, ERRATUM(1868343)

check_erratum_ls neoverse_n1, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start neoverse_n1, ERRATUM(1946160), ERRATA_N1_1946160
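	/*
	 * The S3_6_C15_C8_n encodings below are IMPLEMENTATION DEFINED
	 * registers; each group of writes programs one entry with the values
	 * given in the erratum documentation.
	 */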
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
	isb
workaround_reset_end neoverse_n1, ERRATUM(1946160)

check_erratum_range neoverse_n1, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start neoverse_n1, ERRATUM(2743102), ERRATA_N1_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end neoverse_n1, ERRATUM(2743102)

check_erratum_ls neoverse_n1, ERRATUM(2743102), CPU_REV(4, 1)

workaround_reset_start neoverse_n1, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
#if IMAGE_BL31
	/*
	 * The Neoverse-N1 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs.
	 */
	adr	x0, wa_cve_vbar_neoverse_n1
	msr	vbar_el3, x0
#endif /* IMAGE_BL31 */
workaround_reset_end neoverse_n1, CVE(2022, 23960)

check_erratum_chosen neoverse_n1, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* --------------------------------------------------
 * Disable speculative loads if Neoverse N1 supports
 * SSBS.
 *
 * Shall clobber: x0.
 * --------------------------------------------------
 */
func neoverse_n1_disable_speculative_loads
	/* Check if the PE implements SSBS */
	mrs	x0, id_aa64pfr1_el1
	tst	x0, #(ID_AA64PFR1_EL1_SSBS_MASK << ID_AA64PFR1_EL1_SSBS_SHIFT)
	b.eq	1f

	/* Disable speculative loads */
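	/* PSTATE.SSBS == 0 means speculative store bypassing is not permitted */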
	msr	SSBS, xzr

1:
	ret
endfunc neoverse_n1_disable_speculative_loads

cpu_reset_func_start neoverse_n1
	bl neoverse_n1_disable_speculative_loads

	/* Forces all cacheable atomic instructions to be near */
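	/* "Near" atomics are performed close to the core rather than at the interconnect */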
	mrs	x0, NEOVERSE_N1_CPUACTLR2_EL1
	orr	x0, x0, #NEOVERSE_N1_CPUACTLR2_EL1_BIT_2
	msr	NEOVERSE_N1_CPUACTLR2_EL1, x0
	isb

#if ENABLE_FEAT_AMU
	/* Make sure accesses from EL0/EL1 and EL2 are not trapped to EL3 */
	mrs	x0, actlr_el3
	orr	x0, x0, #NEOVERSE_N1_ACTLR_AMEN_BIT
	msr	actlr_el3, x0

	/* Make sure accesses from EL0/EL1 are not trapped to EL2 */
	mrs	x0, actlr_el2
	orr	x0, x0, #NEOVERSE_N1_ACTLR_AMEN_BIT
	msr	actlr_el2, x0

	/* Enable group0 counters */
	mov	x0, #NEOVERSE_N1_AMU_GROUP0_MASK
	msr	CPUAMCNTENSET_EL0, x0
#endif

#if NEOVERSE_Nx_EXTERNAL_LLC
	/* Some systems may have an external LLC; the core needs to be made aware */
	mrs	x0, NEOVERSE_N1_CPUECTLR_EL1
	orr	x0, x0, NEOVERSE_N1_CPUECTLR_EL1_EXTLLC_BIT
	msr	NEOVERSE_N1_CPUECTLR_EL1, x0
#endif
cpu_reset_func_end neoverse_n1

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func neoverse_n1_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, NEOVERSE_N1_CPUPWRCTLR_EL1
	orr	x0, x0, #NEOVERSE_N1_CORE_PWRDN_EN_MASK
	msr	NEOVERSE_N1_CPUPWRCTLR_EL1, x0
#if ERRATA_N1_2743102
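	/* The BL calls below clobber x30, so stash the return address in x15 */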
	mov	x15, x30
	bl	cpu_get_rev_var
	bl	erratum_neoverse_n1_2743102_wa
	mov	x30, x15
#endif /* ERRATA_N1_2743102 */
	isb
	ret
endfunc neoverse_n1_core_pwr_dwn

errata_report_shim neoverse_n1

/*
 * Handle trap of EL0 IC IVAU instructions to EL3 by executing a TLB
 * inner-shareable invalidation to an arbitrary address followed by a DSB.
 *
 * x1: Exception Syndrome
 */
func neoverse_n1_errata_ic_trap_handler
	cmp	x1, #NEOVERSE_N1_EC_IC_TRAP
	b.ne	1f
	tlbi	vae3is, xzr
	dsb	sy

	/* Skip the IC instruction itself */
	mrs	x3, elr_el3
	add	x3, x3, #4
	msr	elr_el3, x3

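	/* Restore the GP registers saved on exception entry before returning */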
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * Issue Error Synchronization Barrier to synchronize SErrors before
	 * exiting EL3. We're running with EAs unmasked, so any synchronized
	 * errors would be taken immediately; therefore no need to inspect
	 * DISR_EL1 register.
	 */
	esb
	exception_return
1:
	ret
endfunc neoverse_n1_errata_ic_trap_handler

	/* ---------------------------------------------
	 * This function provides neoverse_n1 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.neoverse_n1_regs, "aS"
neoverse_n1_regs:  /* The ascii list of register names to be reported */
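	/* The name list is terminated by an empty string */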
	.asciz	"cpuectlr_el1", ""

func neoverse_n1_cpu_reg_dump
	adr	x6, neoverse_n1_regs
	mrs	x8, NEOVERSE_N1_CPUECTLR_EL1
	ret
endfunc neoverse_n1_cpu_reg_dump

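/*
 * declare_cpu_ops_eh additionally registers the CPU-specific exception
 * handler (the IC trap handler above) alongside the reset and power-down
 * handlers.
 */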
declare_cpu_ops_eh neoverse_n1, NEOVERSE_N1_MIDR, \
	neoverse_n1_reset_func, \
	neoverse_n1_errata_ic_trap_handler, \
	neoverse_n1_core_pwr_dwn