xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a72.S (revision 35b2bbf4942689fd52fa741ac7d93bc7f1d4c230)
1/*
2 * Copyright (c) 2015-2025, Arm Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6#include <arch.h>
7#include <asm_macros.S>
8#include <assert_macros.S>
9#include <cortex_a72.h>
10#include <cpu_macros.S>
11#include <plat_macros.S>
12#include "wa_cve_2022_23960_bhb_vector.S"
13
#if WORKAROUND_CVE_2022_23960
	/*
	 * Emit the Spectre-BHB (CVE-2022-23960) mitigation vector table for
	 * this CPU. CORTEX_A72_BHB_LOOP_COUNT is the per-core number of
	 * branch-loop iterations used by the mitigation vectors.
	 */
	wa_cve_2022_23960_bhb_vector_table CORTEX_A72_BHB_LOOP_COUNT, cortex_a72
#endif /* WORKAROUND_CVE_2022_23960 */

/* Common reset-path prologue for this CPU (macro from cpu_macros.S). */
cpu_reset_prologue cortex_a72
19
20	/* ---------------------------------------------
21	 * Disable all types of L2 prefetches.
22	 * ---------------------------------------------
23	 */
func cortex_a72_disable_l2_prefetch
	/*
	 * Build a mask covering both L2 prefetch-distance fields, then do a
	 * single read-modify-write of CPUECTLR_EL1: zero the distance fields
	 * and set the bit disabling prefetch of table-walk descriptor
	 * accesses. The three bit fields are disjoint in the Cortex-A72
	 * CPUECTLR layout, so the set/clear order is immaterial.
	 */
	mov	x1, #CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK
	orr	x1, x1, #CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK
	mrs	x0, CORTEX_A72_ECTLR_EL1
	bic	x0, x0, x1
	orr	x0, x0, #CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
	msr	CORTEX_A72_ECTLR_EL1, x0
	isb
	ret
endfunc cortex_a72_disable_l2_prefetch
34
35	/* ---------------------------------------------
36	 * Disable the load-store hardware prefetcher.
37	 * ---------------------------------------------
38	 */
func cortex_a72_disable_hw_prefetcher
	/* Set the CPUACTLR bit that disables the L1 D-cache HW prefetcher. */
	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH
	isb
	/* Drain outstanding memory transactions in the Inner Shareable domain */
	dsb	ish
	ret
endfunc cortex_a72_disable_hw_prefetcher
45
46	/* ---------------------------------------------
47	 * Disable intra-cluster coherency
48	 * ---------------------------------------------
49	 */
func cortex_a72_disable_smp
	/*
	 * Clear CPUECTLR_EL1.SMP: take the core out of intra-cluster
	 * coherency ahead of power down.
	 */
	sysreg_bit_clear CORTEX_A72_ECTLR_EL1, CORTEX_A72_ECTLR_SMP_BIT
	ret
endfunc cortex_a72_disable_smp
54
55	/* ---------------------------------------------
56	 * Disable debug interfaces
57	 * ---------------------------------------------
58	 */
func cortex_a72_disable_ext_debug
	/*
	 * Set OSDLR_EL1.DLK (OS Double Lock) so the external debug
	 * interfaces are quiescent before the core is powered down.
	 */
	mov	x0, #1
	msr	osdlr_el1, x0
	isb
	dsb	sy
	ret
endfunc cortex_a72_disable_ext_debug
66
check_erratum_custom_start cortex_a72, ERRATUM(ARCH_WORKAROUND_3)
	/*
	 * cpu_check_csv2 branches to 1f on parts that implement CSV2
	 * (revisions >= r1p0); those do not need SMCCC_ARCH_WORKAROUND_3
	 * reported here.
	 */
	cpu_check_csv2	x0, 1f
	mov	x0, #ERRATA_APPLIES
	ret
1:
	mov	x0, #ERRATA_NOT_APPLIES
	ret
check_erratum_custom_end cortex_a72, ERRATUM(ARCH_WORKAROUND_3)

/* Erratum entry and check function for SMCCC_ARCH_WORKAROUND_3 */
add_erratum_entry cortex_a72, ERRATUM(ARCH_WORKAROUND_3), WORKAROUND_CVE_2022_23960
78
workaround_reset_start cortex_a72, ERRATUM(859971), ERRATA_A72_859971
	/* Erratum 859971 workaround: disable instruction prefetch. */
	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH
workaround_reset_end cortex_a72, ERRATUM(859971)

/* Erratum 859971 applies to revisions r0p3 and earlier ("ls" = lower/same). */
check_erratum_ls cortex_a72, ERRATUM(859971), CPU_REV(0, 3)
84
/*
 * Due to the nature of the erratum, its workaround is applied
 * unconditionally whenever ERRATA_A72_1319367 is chosen at build time.
 */
check_erratum_chosen cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367
/* The erratum workaround itself is interleaved with generic code. */
add_erratum_entry cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367
89
workaround_reset_start cortex_a72, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
#if IMAGE_BL31
	/* Install the Spectre-v2 mitigation vector table (BL31 only). */
	override_vector_table wa_cve_2017_5715_mmu_vbar
#endif
workaround_reset_end cortex_a72, CVE(2017, 5715)
95
check_erratum_custom_start cortex_a72, CVE(2017, 5715)
	/* Parts with CSV2 implemented are not affected; branch to 1f. */
	cpu_check_csv2	x0, 1f
#if WORKAROUND_CVE_2017_5715
	mov	x0, #ERRATA_APPLIES
#else
	/* Affected part, but the mitigation was not compiled in. */
	mov	x0, #ERRATA_MISSING
#endif
	ret
1:
	mov	x0, #ERRATA_NOT_APPLIES
	ret
check_erratum_custom_end cortex_a72, CVE(2017, 5715)
108
workaround_reset_start cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
	/* SSBD mitigation: prevent loads from passing earlier stores. */
	sysreg_bit_set CORTEX_A72_CPUACTLR_EL1, CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
	isb
	dsb	sy
workaround_reset_end cortex_a72, CVE(2018, 3639)
check_erratum_chosen cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
115
workaround_reset_start cortex_a72, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
#if IMAGE_BL31
	/*
	 * The Cortex-A72 generic vectors are overridden to apply the
	 * mitigation on exception entry from lower ELs for revisions >= r1p0
	 * which have CSV2 implemented.
	 *
	 * Skip installing the vector table again if VBAR_EL3 already points
	 * at it (e.g. it was installed for CVE(2017, 5715)).
	 */
	adr	x0, wa_cve_vbar_cortex_a72
	mrs	x1, vbar_el3
	cmp	x0, x1
	b.eq	1f
	msr	vbar_el3, x0
1:
#endif /* IMAGE_BL31 */
workaround_reset_end cortex_a72, CVE(2022, 23960)
132
check_erratum_custom_start cortex_a72, CVE(2022, 23960)
#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
	/* Branch to 1f on parts with CSV2 implemented (revisions >= r1p0). */
	cpu_check_csv2	x0, 1f
	/*
	 * Non-CSV2 part: reported as applying whenever either build-time
	 * workaround is enabled (the CVE-2017-5715 vectors cover this case).
	 */
	mov	x0, #ERRATA_APPLIES
	ret
1:
#if WORKAROUND_CVE_2022_23960
	mov	x0, #ERRATA_APPLIES
#else
	/* CSV2 part, but the BHB mitigation was not compiled in. */
	mov	x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
	ret
#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
	/* Neither workaround compiled in. */
	mov	x0, #ERRATA_MISSING
	ret
check_erratum_custom_end cortex_a72, CVE(2022, 23960)
149
cpu_reset_func_start cortex_a72

	/* ---------------------------------------------
	 * Enable the SMP bit, bringing the core into
	 * intra-cluster coherency at reset.
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A72_ECTLR_EL1, CORTEX_A72_ECTLR_SMP_BIT

cpu_reset_func_end cortex_a72
159
160	/* ----------------------------------------------------
161	 * The CPU Ops core power down function for Cortex-A72.
162	 * ----------------------------------------------------
163	 */
func cortex_a72_core_pwr_dwn
	/* Preserve the return address across the following bl calls. */
	mov	x18, x30

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Flush L1 caches (clean + invalidate by set/way).
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent.
	 * Restore the link register and tail-call the
	 * final step so it returns to our caller.
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_core_pwr_dwn
199
200	/* -------------------------------------------------------
201	 * The CPU Ops cluster power down function for Cortex-A72.
202	 * -------------------------------------------------------
203	 */
func cortex_a72_cluster_pwr_dwn
	/* Preserve the return address across the following bl calls. */
	mov	x18, x30

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

#if !SKIP_A72_L1_FLUSH_PWR_DWN
	/* ---------------------------------------------
	 * Flush L1 caches (clean + invalidate by set/way).
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1
#endif

	/* ---------------------------------------------
	 * Disable the optional ACP (platform hook).
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* -------------------------------------------------
	 * Flush the L2 caches (clean + invalidate by set/way).
	 * -------------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent.
	 * Restore the link register and tail-call the
	 * final step so it returns to our caller.
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_cluster_pwr_dwn
254
255	/* ---------------------------------------------
256	 * This function provides cortex_a72 specific
257	 * register information for crash reporting.
258	 * It needs to return with x6 pointing to
259	 * a list of register names in ascii and
260	 * x8 - x15 having values of registers to be
261	 * reported.
262	 * ---------------------------------------------
263	 */
.section .rodata.cortex_a72_regs, "aS"
cortex_a72_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", "cpumerrsr_el1", "l2merrsr_el1", ""

func cortex_a72_cpu_reg_dump
	/* x6 -> NUL-terminated list of register names (see above). */
	adr	x6, cortex_a72_regs
	/* x8.. hold the corresponding register values, in list order. */
	mrs	x8, CORTEX_A72_ECTLR_EL1
	mrs	x9, CORTEX_A72_MERRSR_EL1
	mrs	x10, CORTEX_A72_L2MERRSR_EL1
	ret
endfunc cortex_a72_cpu_reg_dump
275
/*
 * Register the Cortex-A72 cpu_ops: MIDR to match on, reset handler, and
 * the core/cluster power-down handlers defined above.
 */
declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
	cortex_a72_reset_func, \
	cortex_a72_core_pwr_dwn, \
	cortex_a72_cluster_pwr_dwn
280