xref: /rk3399_ARM-atf/lib/cpus/aarch64/cortex_a72.S (revision 989960cf94cfb8d62b91a1653525bd93800b0f95)
1/*
2 * Copyright (c) 2015-2023, Arm Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6#include <arch.h>
7#include <asm_macros.S>
8#include <assert_macros.S>
9#include <cortex_a72.h>
10#include <cpu_macros.S>
11#include <plat_macros.S>
12#include "wa_cve_2022_23960_bhb_vector.S"
13
#if WORKAROUND_CVE_2022_23960
	/*
	 * Instantiate the Cortex-A72 branch-loop mitigation vector table for
	 * CVE-2022-23960 (Spectre-BHB). The loop iteration count is the
	 * CPU-specific CORTEX_A72_BHB_LOOP_COUNT; the macro generates the
	 * wa_cve_vbar_cortex_a72 vectors installed further below.
	 */
	wa_cve_2022_23960_bhb_vector_table CORTEX_A72_BHB_LOOP_COUNT, cortex_a72
#endif /* WORKAROUND_CVE_2022_23960 */
17
	/* ---------------------------------------------
	 * Disable L1 data cache and unified L2 cache by
	 * clearing the cache-enable (C) bit in SCTLR_EL3.
	 * Clobbers: x1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_dcache
	mrs	x1, sctlr_el3
	bic	x1, x1, #SCTLR_C_BIT	/* Clear data/unified cache enable */
	msr	sctlr_el3, x1
	isb				/* Ensure the disable takes effect */
	ret
endfunc cortex_a72_disable_dcache
29
	/* ---------------------------------------------
	 * Disable all types of L2 prefetches: set the
	 * "disable table walk descriptor access prefetch"
	 * bit and clear both the instruction and data
	 * prefetch distance fields in CPUECTLR_EL1.
	 * Clobbers: x0, x1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_l2_prefetch
	mrs	x0, CORTEX_A72_ECTLR_EL1
	orr	x0, x0, #CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
	/* Build the combined mask of both prefetch distance fields */
	mov	x1, #CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK
	orr	x1, x1, #CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK
	bic	x0, x0, x1		/* Zero both distance fields */
	msr	CORTEX_A72_ECTLR_EL1, x0
	isb
	ret
endfunc cortex_a72_disable_l2_prefetch
44
	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher by
	 * setting the L1 D-cache HW prefetch disable bit
	 * in CPUACTLR_EL1.
	 * Clobbers: x0
	 * ---------------------------------------------
	 */
func cortex_a72_disable_hw_prefetcher
	mrs	x0, CORTEX_A72_CPUACTLR_EL1
	orr	x0, x0, #CORTEX_A72_CPUACTLR_EL1_DISABLE_L1_DCACHE_HW_PFTCH
	msr	CORTEX_A72_CPUACTLR_EL1, x0
	isb
	dsb	ish			/* Complete outstanding memory accesses */
	ret
endfunc cortex_a72_disable_hw_prefetcher
57
	/* ---------------------------------------------
	 * Disable intra-cluster coherency by clearing
	 * the SMP bit in CPUECTLR_EL1. Callers in the
	 * power-down sequences below flush the caches
	 * before invoking this.
	 * Clobbers: x0
	 * ---------------------------------------------
	 */
func cortex_a72_disable_smp
	mrs	x0, CORTEX_A72_ECTLR_EL1
	bic	x0, x0, #CORTEX_A72_ECTLR_SMP_BIT
	msr	CORTEX_A72_ECTLR_EL1, x0
	ret
endfunc cortex_a72_disable_smp
68
	/* ---------------------------------------------
	 * Disable debug interfaces: force them quiescent
	 * by setting the OS Double Lock (OSDLR_EL1 = 1).
	 * Clobbers: x0
	 * ---------------------------------------------
	 */
func cortex_a72_disable_ext_debug
	mov	x0, #1
	msr	osdlr_el1, x0		/* Set OSDLR_EL1.DLK */
	isb
	dsb	sy
	ret
endfunc cortex_a72_disable_ext_debug
80
	/* ---------------------------------------------------
	 * Report whether the SMCCC_ARCH_WORKAROUND_3
	 * mitigation is needed on this core: parts without
	 * CSV2 need the firmware workaround; parts with CSV2
	 * do not (cpu_check_csv2 branches to 1f when CSV2 is
	 * implemented).
	 * Returns: x0 = ERRATA_APPLIES or ERRATA_NOT_APPLIES
	 * Clobbers: x0
	 * ---------------------------------------------------
	 */
func check_smccc_arch_workaround_3
	cpu_check_csv2	x0, 1f		/* Branch if CSV2 implemented */
	mov	x0, #ERRATA_APPLIES
	ret
1:
	mov	x0, #ERRATA_NOT_APPLIES
	ret
endfunc check_smccc_arch_workaround_3
89
/*
 * Erratum 859971 reset-time workaround: disable instruction prefetch via
 * CPUACTLR_EL1. Clobbers x1.
 */
workaround_reset_start cortex_a72, ERRATUM(859971), ERRATA_A72_859971
	mrs	x1, CORTEX_A72_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A72_CPUACTLR_EL1_DIS_INSTR_PREFETCH
	msr	CORTEX_A72_CPUACTLR_EL1, x1
workaround_reset_end cortex_a72, ERRATUM(859971)

/* Erratum 859971 applies to revisions up to and including r0p3 */
check_erratum_ls cortex_a72, ERRATUM(859971), CPU_REV(0, 3)
97
/* Due to the nature of the errata it is applied unconditionally when chosen */
check_erratum_chosen cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367
/*
 * The erratum 1319367 workaround is interleaved with generic (non
 * CPU-specific) code elsewhere, so only an errata-list entry is registered
 * here and nothing runs from the reset function (NO_APPLY_AT_RESET).
 */
add_erratum_entry cortex_a72, ERRATUM(1319367), ERRATA_A72_1319367, NO_APPLY_AT_RESET
102
/*
 * CVE-2017-5715 (Spectre v2) reset-time workaround: point VBAR_EL3 at the
 * mitigation vector table. Only done in BL31, which owns the EL3 vectors.
 * Clobbers x0.
 */
workaround_reset_start cortex_a72, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
#if IMAGE_BL31
	adr	x0, wa_cve_2017_5715_mmu_vbar
	msr	vbar_el3, x0
#endif
workaround_reset_end cortex_a72, CVE(2017, 5715)
109
/*
 * CVE-2017-5715 status check:
 *  - CSV2 implemented -> ERRATA_NOT_APPLIES (core treated as not affected)
 *  - otherwise        -> ERRATA_APPLIES when WORKAROUND_CVE_2017_5715 is
 *                        built in, ERRATA_MISSING when it is not.
 * Returns status in x0.
 */
check_erratum_custom_start cortex_a72, CVE(2017, 5715)
	cpu_check_csv2	x0, 1f		/* Branch if CSV2 implemented */
#if WORKAROUND_CVE_2017_5715
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
1:
	mov	x0, #ERRATA_NOT_APPLIES
	ret
check_erratum_custom_end cortex_a72, CVE(2017, 5715)
122
/*
 * CVE-2018-3639 (Speculative Store Bypass) reset-time workaround: disable
 * load-pass-store speculation in CPUACTLR_EL1. Clobbers x0.
 */
workaround_reset_start cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
	mrs	x0, CORTEX_A72_CPUACTLR_EL1
	orr	x0, x0, #CORTEX_A72_CPUACTLR_EL1_DIS_LOAD_PASS_STORE
	msr	CORTEX_A72_CPUACTLR_EL1, x0
	isb
	dsb	sy
workaround_reset_end cortex_a72, CVE(2018, 3639)
/* Applied whenever the build opts in; no revision check is needed */
check_erratum_chosen cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639
131
/*
 * CVE-2022-23960 (Spectre-BHB) reset-time workaround: install the
 * branch-loop mitigation vectors generated at the top of this file.
 * Only done in BL31, which owns the EL3 vectors. Clobbers x0, x1.
 */
workaround_reset_start cortex_a72, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
#if IMAGE_BL31
	/*
	 * The Cortex-A72 generic vectors are overridden to apply the
	 * mitigation on exception entry from lower ELs for revisions >= r1p0
	 * which has CSV2 implemented.
	 * Skip the write if VBAR_EL3 already points at these vectors (e.g.
	 * installed earlier for CVE(2017, 5715)).
	 */
	adr	x0, wa_cve_vbar_cortex_a72
	mrs	x1, vbar_el3
	cmp	x0, x1
	b.eq	1f			/* Already installed -> nothing to do */
	msr	vbar_el3, x0
1:
#endif /* IMAGE_BL31 */
workaround_reset_end cortex_a72, CVE(2022, 23960)
148
/*
 * CVE-2022-23960 status check (returns status in x0):
 *  - CSV2 not implemented: ERRATA_APPLIES when either the CVE-2017-5715 or
 *    the CVE-2022-23960 workaround is built in (the fall-through path after
 *    cpu_check_csv2), ERRATA_MISSING when neither is.
 *  - CSV2 implemented (branch to 1f): ERRATA_APPLIES when the
 *    CVE-2022-23960 workaround is built in, ERRATA_MISSING otherwise.
 */
check_erratum_custom_start cortex_a72, CVE(2022, 23960)
#if WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960
	cpu_check_csv2	x0, 1f		/* Branch if CSV2 implemented */
	mov	x0, #ERRATA_APPLIES
	ret
1:
#if WORKAROUND_CVE_2022_23960
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
	ret
#endif /* WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960 */
	mov	x0, #ERRATA_MISSING
	ret
check_erratum_custom_end cortex_a72, CVE(2022, 23960)
165
/*
 * Cortex-A72 reset handler. The cpu_reset_func_start/end macros wrap this
 * with the framework's reset-time erratum application; the CPU-specific
 * part only has to bring the core into coherency. Clobbers x0.
 */
cpu_reset_func_start cortex_a72

	/* ---------------------------------------------
	 * Enable the SMP bit, i.e. join intra-cluster
	 * coherency (the inverse of
	 * cortex_a72_disable_smp above).
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A72_ECTLR_EL1
	orr	x0, x0, #CORTEX_A72_ECTLR_SMP_BIT
	msr	CORTEX_A72_ECTLR_EL1, x0

cpu_reset_func_end cortex_a72
177
	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A72:
	 * disable caches and prefetchers, flush L1, leave
	 * coherency, then quiesce debug. The link register is
	 * preserved in x18 across the helper calls and the
	 * last step is reached by tail call.
	 * Clobbers: x0, x1, x18 (plus whatever dcsw_op_level1
	 * uses).
	 * ----------------------------------------------------
	 */
func cortex_a72_core_pwr_dwn
	mov	x18, x30		/* Preserve LR across the bl calls */

	/* ---------------------------------------------
	 * Turn off caches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_dcache

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Flush L1 caches: clean and invalidate by
	 * set/way (DCCISW).
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent.
	 * Restore LR and tail-call so the helper's ret
	 * returns to our caller.
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_core_pwr_dwn
223
	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A72.
	 * Same sequence as the core power down, plus disabling
	 * the optional ACP and flushing the L2 cache before
	 * leaving coherency. LR is preserved in x18 and the last
	 * step is reached by tail call.
	 * Clobbers: x0, x1, x18 (plus whatever the dcsw_op_*
	 * and plat_disable_acp helpers use).
	 * -------------------------------------------------------
	 */
func cortex_a72_cluster_pwr_dwn
	mov	x18, x30		/* Preserve LR across the bl calls */

	/* ---------------------------------------------
	 * Turn off caches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_dcache

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

#if !SKIP_A72_L1_FLUSH_PWR_DWN
	/* ---------------------------------------------
	 * Flush L1 caches (clean and invalidate by
	 * set/way). Platforms may opt out via
	 * SKIP_A72_L1_FLUSH_PWR_DWN.
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1
#endif

	/* ---------------------------------------------
	 * Disable the optional ACP (platform hook).
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* -------------------------------------------------
	 * Flush the L2 caches (clean and invalidate by
	 * set/way).
	 * -------------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent.
	 * Restore LR and tail-call so the helper's ret
	 * returns to our caller.
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_cluster_pwr_dwn
284
/* Generate the errata reporting function for Cortex-A72 (see cpu_macros.S) */
errata_report_shim cortex_a72
286
	/* ---------------------------------------------
	 * This function provides cortex_a72 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a72_regs, "aS"
cortex_a72_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", "cpumerrsr_el1", "l2merrsr_el1", ""

func cortex_a72_cpu_reg_dump
	adr	x6, cortex_a72_regs	/* x6 -> name list above */
	mrs	x8, CORTEX_A72_ECTLR_EL1	/* cpuectlr_el1 */
	mrs	x9, CORTEX_A72_MERRSR_EL1	/* cpumerrsr_el1 */
	mrs	x10, CORTEX_A72_L2MERRSR_EL1	/* l2merrsr_el1 */
	ret
endfunc cortex_a72_cpu_reg_dump
307
/*
 * Register the Cortex-A72 cpu_ops structure, keyed on CORTEX_A72_MIDR,
 * including the SMCCC workaround query functions (extra1/extra2/extra3
 * slots of declare_cpu_ops_wa -- see cpu_macros.S for the exact mapping):
 * CVE-2017-5715 check, no second extra function, and the
 * SMCCC_ARCH_WORKAROUND_3 check, plus the reset and core/cluster
 * power-down handlers defined above.
 */
declare_cpu_ops_wa cortex_a72, CORTEX_A72_MIDR, \
	cortex_a72_reset_func, \
	check_erratum_cortex_a72_5715, \
	CPU_NO_EXTRA2_FUNC, \
	check_smccc_arch_workaround_3, \
	cortex_a72_core_pwr_dwn, \
	cortex_a72_cluster_pwr_dwn
315