/*
 * Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <common/debug.h>
#include <cortex_a72.h>
#include <cpu_macros.S>

	/* ---------------------------------------------
	 * Disable all types of L2 prefetches.
	 * ---------------------------------------------
	 */
func cortex_a72_disable_l2_prefetch
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	orr64_imm	r0, r1, CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
	bic64_imm	r0, r1, (CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK | \
				CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK)
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	isb
	bx	lr
endfunc cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
func cortex_a72_disable_hw_prefetcher
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r0, r1, CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
	isb
	dsb	ish
	bx	lr
endfunc cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Disable intra-cluster coherency
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_smp
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	bic64_imm	r0, r1, CORTEX_A72_ECTLR_SMP_BIT
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	bx	lr
endfunc cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Disable the external debug interfaces by
	 * setting the OS Double Lock (DBGOSDLR), which
	 * forces them quiescent before power down.
	 * ---------------------------------------------
	 */
func cortex_a72_disable_ext_debug
	mov	r0, #1
	stcopr	r0, DBGOSDLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a72_disable_ext_debug

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A72 Errata #859971.
	 * This applies only to revision <= r0p3 of Cortex A72.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ---------------------------------------------------
	 */
func errata_a72_859971_wa
	mov		r2, lr
	bl		check_errata_859971
	mov		lr, r2
	cmp		r0, #ERRATA_NOT_APPLIES
	beq		1f
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r1, r1, CORTEX_A72_CPUACTLR_DIS_INSTR_PREFETCH
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
1:
	bx		lr
endfunc errata_a72_859971_wa

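	/* ---------------------------------------------------
	 * cpu_rev_var_ls returns ERRATA_APPLIES when the
	 * revision-variant in r0 is less than or equal to the
	 * limit in r1 (r0p3 here), ERRATA_NOT_APPLIES otherwise.
	 * ---------------------------------------------------
	 */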
func check_errata_859971
	mov		r1, #0x03
	b		cpu_rev_var_ls
endfunc check_errata_859971

add_erratum_entry cortex_a72, ERRATUM(859971), ERRATA_A72_859971

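	/* ---------------------------------------------------
	 * No workaround for CVE-2017-5715 (Spectre variant 2)
	 * is implemented for this CPU on AArch32, so report it
	 * as missing.
	 * ---------------------------------------------------
	 */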
func check_errata_cve_2017_5715
	mov	r0, #ERRATA_MISSING
	bx	lr
endfunc check_errata_cve_2017_5715

add_erratum_entry cortex_a72, CVE(2017, 5715), WORKAROUND_CVE_2017_5715

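	/* ---------------------------------------------------
	 * The CVE-2018-3639 (speculative store bypass)
	 * mitigation is applied unconditionally by the reset
	 * function when WORKAROUND_CVE_2018_3639 is enabled,
	 * so report its status from the build flag.
	 * ---------------------------------------------------
	 */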
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2018_3639

add_erratum_entry cortex_a72, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

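	/* ---------------------------------------------------
	 * No workaround for CVE-2022-23960 (Spectre-BHB) is
	 * implemented for this CPU on AArch32, so report it as
	 * missing.
	 * ---------------------------------------------------
	 */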
func check_errata_cve_2022_23960
	mov	r0, #ERRATA_MISSING
	bx	lr
endfunc check_errata_cve_2022_23960

add_erratum_entry cortex_a72, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A72.
	 * -------------------------------------------------
	 */
func cortex_a72_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var
	mov	r4, r0

#if ERRATA_A72_859971
	mov	r0, r4
	bl	errata_a72_859971_wa
#endif

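	/* ---------------------------------------------
	 * Mitigate CVE-2018-3639 (speculative store
	 * bypass) by setting the Disable-Load-Pass-Store
	 * bit in CPUACTLR.
	 * ---------------------------------------------
	 */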
#if WORKAROUND_CVE_2018_3639
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r0, r1, CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
	isb
	dsb	sy
#endif

	/* ---------------------------------------------
	 * Enable the SMP bit.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	orr64_imm	r0, r1, CORTEX_A72_ECTLR_SMP_BIT
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	isb
	bx	r5
endfunc cortex_a72_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A72.
	 * ----------------------------------------------------
	 */
func cortex_a72_core_pwr_dwn
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A72.
	 * -------------------------------------------------------
	 */
func cortex_a72_cluster_pwr_dwn
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

#if !SKIP_A72_L1_FLUSH_PWR_DWN
	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1
#endif

	/* ---------------------------------------------
	 * Disable the optional ACP.
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* -------------------------------------------------
	 * Flush the L2 caches.
	 * -------------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_cluster_pwr_dwn

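	/* -------------------------------------------------
	 * Register the Cortex-A72 CPU ops, matched by MIDR.
	 * -------------------------------------------------
	 */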
declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
	cortex_a72_reset_func, \
	cortex_a72_core_pwr_dwn, \
	cortex_a72_cluster_pwr_dwn