/*
 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a32.h>
#include <cpu_macros.S>

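	/* ---------------------------------------------------------------
	 * CPU-specific operations for Cortex-A32. The handlers below are
	 * registered through declare_cpu_ops at the end of this file and
	 * are selected at runtime by matching the core's MIDR.
	 * ---------------------------------------------------------------
	 */
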
	/* ---------------------------------------------
	 * Disable intra-cluster coherency
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a32_disable_smp
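	/*
	 * CPUECTLR is a 64-bit register in AArch32 state, accessed as a
	 * pair of 32-bit words (MRRC/MCRR) through the ldcopr16/stcopr16
	 * macros. The SMPEN bit sits in the low word, held in r0.
	 */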
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	bic	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
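	/*
	 * Synchronise the CPUECTLR update and wait for all outstanding
	 * memory accesses to complete before leaving coherency.
	 */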
	isb
	dsb	sy
	bx	lr
endfunc cortex_a32_disable_smp

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A32.
	 * Clobbers: r0-r1
	 * -------------------------------------------------
	 */
func cortex_a32_reset_func
	/* ---------------------------------------------
	 * Enable the SMP bit. SMPEN must be set before
	 * the caches are enabled so that this core
	 * takes part in the cluster's coherency.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	orr	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	isb
	bx	lr
endfunc cortex_a32_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * ----------------------------------------------------
	 */
func cortex_a32_core_pwr_dwn
	/* r12 is pushed to meet the 8 byte stack alignment requirement */
	push	{r12, lr}

	/* Assert that the data cache is disabled */
#if ASM_ASSERTION
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
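	/*
	 * DC_OP_CISW selects a clean-and-invalidate by set/way;
	 * dcsw_op_level1 applies it to the level 1 data cache only.
	 */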
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency. This is
	 * a tail call: cortex_a32_disable_smp returns
	 * to the lr restored by the pop below.
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * -------------------------------------------------------
	 */
func cortex_a32_cluster_pwr_dwn
	/* r12 is pushed to meet the 8 byte stack alignment requirement */
	push	{r12, lr}

	/* Assert that the data cache is disabled */
#if ASM_ASSERTION
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Disable the optional ACP.
	 * ---------------------------------------------
	 */
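	/*
	 * plat_disable_acp is a platform hook; a weak default no-op is
	 * provided for platforms without an Accelerator Coherency Port.
	 */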
	bl	plat_disable_acp

	/* ---------------------------------------------
	 * Flush L2 cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra-cluster coherency
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_cluster_pwr_dwn

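	/*
	 * Register the cpu_ops for this core: declare_cpu_ops emits a
	 * cpu_ops descriptor holding CORTEX_A32_MIDR and the handlers
	 * above, which the framework matches at runtime against the
	 * running core's MIDR.
	 */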
declare_cpu_ops cortex_a32, CORTEX_A32_MIDR, \
	cortex_a32_reset_func, \
	cortex_a32_core_pwr_dwn, \
	cortex_a32_cluster_pwr_dwn