/*
 * Copyright (c) 2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <bl_common.h>
#include <cortex_a57.h>
#include <cpu_macros.S>
#include <plat_macros.S>

	/* ---------------------------------------------
	 * Disable L1 data cache and unified L2 cache
	 * ---------------------------------------------
	 */
func cortex_a57_disable_dcache
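	/* Clearing SCTLR_EL3.C makes EL3 data accesses to Normal memory non-cacheable */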
	mrs	x1, sctlr_el3
	bic	x1, x1, #SCTLR_C_BIT
	msr	sctlr_el3, x1
	isb
	ret
endfunc cortex_a57_disable_dcache

	/* ---------------------------------------------
	 * Disable all types of L2 prefetches.
	 * ---------------------------------------------
	 */
func cortex_a57_disable_l2_prefetch
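	/*
	 * Set the bit that disables table walk descriptor access prefetch
	 * and clear the L2 instruction and data prefetch distance fields.
	 */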
	mrs	x0, CPUECTLR_EL1
	orr	x0, x0, #CPUECTLR_DIS_TWD_ACC_PFTCH_BIT
	mov	x1, #CPUECTLR_L2_IPFTCH_DIST_MASK
	orr	x1, x1, #CPUECTLR_L2_DPFTCH_DIST_MASK
	bic	x0, x0, x1
	msr	CPUECTLR_EL1, x0
	isb
	dsb	ish
	ret
endfunc cortex_a57_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable intra-cluster coherency
	 * ---------------------------------------------
	 */
func cortex_a57_disable_smp
	mrs	x0, CPUECTLR_EL1
	bic	x0, x0, #CPUECTLR_SMP_BIT
	msr	CPUECTLR_EL1, x0
	ret
endfunc cortex_a57_disable_smp

	/* ---------------------------------------------
	 * Disable debug interfaces
	 * ---------------------------------------------
	 */
func cortex_a57_disable_ext_debug
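	/*
	 * Set OSDLR_EL1.DLK (OS Double Lock) to lock out the external
	 * debug interface.
	 */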
	mov	x0, #1
	msr	osdlr_el1, x0
	isb
	dsb	sy
	ret
endfunc cortex_a57_disable_ext_debug

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A57 Erratum #806969.
	 * This applies only to revision r0p0 of Cortex A57.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Clobbers : x0 - x5
	 * --------------------------------------------------
	 */
func errata_a57_806969_wa
	/*
	 * Compare x0 against revision r0p0
	 */
	cbz	x0, apply_806969
#if DEBUG
	b	print_revision_warning
#else
	ret
#endif
apply_806969:
	/*
	 * Test if the workaround has already been applied in an earlier
	 * invocation of the reset handler and does not need to be
	 * applied again.
	 */
	mrs	x1, CPUACTLR_EL1
	tst	x1, #CPUACTLR_NO_ALLOC_WBWA
	b.ne	skip_806969
	orr	x1, x1, #CPUACTLR_NO_ALLOC_WBWA
	msr	CPUACTLR_EL1, x1
skip_806969:
	ret
endfunc errata_a57_806969_wa


	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A57 Erratum #813420.
	 * This applies only to revision r0p0 of Cortex A57.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Clobbers : x0 - x5
	 * ---------------------------------------------------
	 */
func errata_a57_813420_wa
	/*
	 * Compare x0 against revision r0p0
	 */
	cbz	x0, apply_813420
#if DEBUG
	b	print_revision_warning
#else
	ret
#endif
apply_813420:
	/*
	 * Test if the workaround has already been applied in an earlier
	 * invocation of the reset handler and does not need to be
	 * applied again.
	 */
	mrs	x1, CPUACTLR_EL1
	tst	x1, #CPUACTLR_DCC_AS_DCCI
	b.ne	skip_813420
	orr	x1, x1, #CPUACTLR_DCC_AS_DCCI
	msr	CPUACTLR_EL1, x1
skip_813420:
	ret
endfunc errata_a57_813420_wa

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A57.
	 * Clobbers: x0-x5, x15, x19, x30
	 * -------------------------------------------------
	 */
func cortex_a57_reset_func
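	/* Preserve the return address in x19; the 'bl' calls below clobber x30 */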
	mov	x19, x30
	mrs	x0, midr_el1

	/*
	 * Extract the variant[20:23] and revision[0:3] from x0
	 * and pack it in x15[0:7] as variant[4:7] and revision[0:3].
	 * First extract x0[16:23] to x15[0:7] and zero fill the rest.
	 * Then extract x0[0:3] into x15[0:3] retaining other bits.
	 */
	ubfx	x15, x0, #(MIDR_VAR_SHIFT - MIDR_REV_BITS), #(MIDR_REV_BITS + MIDR_VAR_BITS)
	bfxil	x15, x0, #MIDR_REV_SHIFT, #MIDR_REV_BITS

#if ERRATA_A57_806969
	mov	x0, x15
	bl	errata_a57_806969_wa
#endif

#if ERRATA_A57_813420
	mov	x0, x15
	bl	errata_a57_813420_wa
#endif

	/* ---------------------------------------------
	 * As a bare minimum enable the SMP bit if it is
	 * not already set.
	 * ---------------------------------------------
	 */
	mrs	x0, CPUECTLR_EL1
	tst	x0, #CPUECTLR_SMP_BIT
	b.ne	skip_smp_setup
	orr	x0, x0, #CPUECTLR_SMP_BIT
	msr	CPUECTLR_EL1, x0
skip_smp_setup:
	isb
	ret	x19
endfunc cortex_a57_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A57.
	 * ----------------------------------------------------
	 */
func cortex_a57_core_pwr_dwn
	mov	x18, x30

	/* ---------------------------------------------
	 * Turn off caches.
	 * ---------------------------------------------
	 */
	bl	cortex_a57_disable_dcache

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a57_disable_l2_prefetch

	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a57_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a57_disable_ext_debug
endfunc cortex_a57_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A57.
	 * -------------------------------------------------------
	 */
func cortex_a57_cluster_pwr_dwn
	mov	x18, x30

	/* ---------------------------------------------
	 * Turn off caches.
	 * ---------------------------------------------
	 */
	bl	cortex_a57_disable_dcache

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a57_disable_l2_prefetch

#if !SKIP_A57_L1_FLUSH_PWR_DWN
	/* -------------------------------------------------
	 * Flush the L1 caches.
	 * -------------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level1
#endif
	/* ---------------------------------------------
	 * Disable the optional ACP.
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* -------------------------------------------------
	 * Flush the L2 caches.
	 * -------------------------------------------------
	 */
	mov	x0, #DCCISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a57_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * ---------------------------------------------
	 */
	mov	x30, x18
	b	cortex_a57_disable_ext_debug
endfunc cortex_a57_cluster_pwr_dwn

	/* ---------------------------------------------
	 * This function provides cortex_a57 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a57_regs, "aS"
cortex_a57_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a57_cpu_reg_dump
	adr	x6, cortex_a57_regs
	mrs	x8, CPUECTLR_EL1
	ret
endfunc cortex_a57_cpu_reg_dump

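	/* ---------------------------------------------
	 * Register the handlers above with the generic
	 * cpu_ops framework, keyed by the Cortex-A57
	 * MIDR.
	 * ---------------------------------------------
	 */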
declare_cpu_ops cortex_a57, CORTEX_A57_MIDR