/*
 * Copyright (c) 2015, Linaro Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef ARM64_H
#define ARM64_H

#ifndef ASM
#include <stdint.h>
#endif

#define SCTLR_M		(1 << 0)
#define SCTLR_A		(1 << 1)
#define SCTLR_C		(1 << 2)
#define SCTLR_SA	(1 << 3)
#define SCTLR_I		(1 << 12)

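/*
 * Illustrative sketch (not part of the original header): the single-bit
 * SCTLR defines above are meant to be OR:ed together, e.g. to describe an
 * "MMU plus data and instruction caches enabled" configuration. The helper
 * name below is hypothetical and only shows how the bits compose.
 */
#ifndef ASM
static inline uint32_t example_sctlr_mmu_cache_bits(void)
{
	/* Enable MMU (M), data/unified caches (C) and instruction cache (I) */
	return SCTLR_M | SCTLR_C | SCTLR_I;
}
#endif
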
#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48

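/*
 * Illustrative sketch (not part of the original header): how the ASID
 * field defines above can be used to replace the ASID held in the upper
 * bits of a TTBRx_EL1 value. The helper name and parameters are
 * hypothetical.
 */
#ifndef ASM
static inline uint64_t example_ttbr_set_asid(uint64_t ttbr, uint64_t asid)
{
	ttbr &= ~((uint64_t)TTBR_ASID_MASK << TTBR_ASID_SHIFT);
	ttbr |= (asid & TTBR_ASID_MASK) << TTBR_ASID_SHIFT;
	return ttbr;
}
#endif
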
#define CLIDR_LOUIS_SHIFT	21
#define CLIDR_LOC_SHIFT		24
#define CLIDR_FIELD_WIDTH	3

#define CSSELR_LEVEL_SHIFT	1

#define DAIFBIT_FIQ			(1 << 0)
#define DAIFBIT_IRQ			(1 << 1)
#define DAIFBIT_ABT			(1 << 2)
#define DAIFBIT_DBG			(1 << 3)
#define DAIFBIT_ALL			(DAIFBIT_FIQ | DAIFBIT_IRQ | \
					 DAIFBIT_ABT | DAIFBIT_DBG)

#define DAIF_F_SHIFT		6
#define DAIF_F			(1 << 6)
#define DAIF_I			(1 << 7)
#define DAIF_A			(1 << 8)
#define DAIF_D			(1 << 9)
#define DAIF_AIF		(DAIF_A | DAIF_I | DAIF_F)

#define SPSR_MODE_RW_SHIFT	4
#define SPSR_MODE_RW_MASK	0x1
#define SPSR_MODE_RW_64		0x0
#define SPSR_MODE_RW_32		0x1

#define SPSR_64_MODE_SP_SHIFT	0
#define SPSR_64_MODE_SP_MASK	0x1
#define SPSR_64_MODE_SP_EL0	0x0
#define SPSR_64_MODE_SP_ELX	0x1

#define SPSR_64_MODE_EL_SHIFT	2
#define SPSR_64_MODE_EL_MASK	0x3
#define SPSR_64_MODE_EL1	0x1
#define SPSR_64_MODE_EL0	0x0

#define SPSR_64_DAIF_SHIFT	6
#define SPSR_64_DAIF_MASK	0xf

#define SPSR_32_AIF_SHIFT	6
#define SPSR_32_AIF_MASK	0x7

#define SPSR_32_E_SHIFT		9
#define SPSR_32_E_MASK		0x1
#define SPSR_32_E_LITTLE	0x0
#define SPSR_32_E_BIG		0x1

#define SPSR_32_T_SHIFT		5
#define SPSR_32_T_MASK		0x1
#define SPSR_32_T_ARM		0x0
#define SPSR_32_T_THUMB		0x1

#define SPSR_32_MODE_SHIFT	0
#define SPSR_32_MODE_MASK	0xf
#define SPSR_32_MODE_USR	0x0

#define SPSR_64(el, sp, daif)						\
	(SPSR_MODE_RW_64 << SPSR_MODE_RW_SHIFT |			\
	((el) & SPSR_64_MODE_EL_MASK) << SPSR_64_MODE_EL_SHIFT |	\
	((sp) & SPSR_64_MODE_SP_MASK) << SPSR_64_MODE_SP_SHIFT |	\
	((daif) & SPSR_64_DAIF_MASK) << SPSR_64_DAIF_SHIFT)

#define SPSR_32(mode, isa, aif)						\
	(SPSR_MODE_RW_32 << SPSR_MODE_RW_SHIFT |			\
	SPSR_32_E_LITTLE << SPSR_32_E_SHIFT |				\
	((mode) & SPSR_32_MODE_MASK) << SPSR_32_MODE_SHIFT |		\
	((isa) & SPSR_32_T_MASK) << SPSR_32_T_SHIFT |			\
	((aif) & SPSR_32_AIF_MASK) << SPSR_32_AIF_SHIFT)

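/*
 * Illustrative sketch (not part of the original header): building an SPSR
 * value with the SPSR_64() macro above for an exception return to AArch64
 * EL0, using SP_EL0 and with all of D, A, I and F masked
 * (SPSR_64_DAIF_MASK selects all four mask bits). The helper name is
 * hypothetical.
 */
#ifndef ASM
static inline uint32_t example_spsr_el0_all_masked(void)
{
	return SPSR_64(SPSR_64_MODE_EL0, SPSR_64_MODE_SP_EL0,
		       SPSR_64_DAIF_MASK);
}
#endif
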
#define TCR_T0SZ_SHIFT		0
#define TCR_EPD0		(1 << 7)
#define TCR_IRGN0_SHIFT		8
#define TCR_ORGN0_SHIFT		10
#define TCR_SH0_SHIFT		12
#define TCR_T1SZ_SHIFT		16
#define TCR_A1			(1 << 22)
#define TCR_EPD1		(1 << 23)
#define TCR_IRGN1_SHIFT		24
#define TCR_ORGN1_SHIFT		26
#define TCR_SH1_SHIFT		28
#define TCR_EL1_IPS_SHIFT	32
#define TCR_TG1_4KB		(2ull << 30)

/* Normal memory, Inner/Outer Non-cacheable */
#define TCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TCR_XRGNX_WBWA		0x3

/* Non-shareable */
#define TCR_SHX_NSH		0x0
/* Outer Shareable */
#define TCR_SHX_OSH		0x2
/* Inner Shareable */
#define TCR_SHX_ISH		0x3

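/*
 * Illustrative sketch (not part of the original header): composing the
 * TTBR0 table-walk attribute fields of a TCR_EL1 value from the defines
 * above (inner/outer write-back cacheable, inner shareable). The helper
 * name is hypothetical and the chosen attributes are only an example, not
 * necessarily what the rest of the code programs.
 */
#ifndef ASM
static inline uint64_t example_tcr_ttbr0_attrs(void)
{
	return ((uint64_t)TCR_XRGNX_WB << TCR_IRGN0_SHIFT) |
	       ((uint64_t)TCR_XRGNX_WB << TCR_ORGN0_SHIFT) |
	       ((uint64_t)TCR_SHX_ISH << TCR_SH0_SHIFT);
}
#endif
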
#define ESR_EC_SHIFT		26
#define ESR_EC_MASK		0x3f

#define ESR_EC_UNKNOWN		0x00
#define ESR_EC_WFI		0x01
#define ESR_EC_AARCH32_CP15_32	0x03
#define ESR_EC_AARCH32_CP15_64	0x04
#define ESR_EC_AARCH32_CP14_MR	0x05
#define ESR_EC_AARCH32_CP14_LS	0x06
#define ESR_EC_FP_ASIMD		0x07
#define ESR_EC_AARCH32_CP10_ID	0x08
#define ESR_EC_AARCH32_CP14_64	0x0c
#define ESR_EC_ILLEGAL		0x0e
#define ESR_EC_AARCH32_SVC	0x11
#define ESR_EC_AARCH64_SVC	0x15
#define ESR_EC_AARCH64_SYS	0x18
#define ESR_EC_IABT_EL0		0x20
#define ESR_EC_IABT_EL1		0x21
#define ESR_EC_PC_ALIGN		0x22
#define ESR_EC_DABT_EL0		0x24
#define ESR_EC_DABT_EL1		0x25
#define ESR_EC_SP_ALIGN		0x26
#define ESR_EC_AARCH32_FP	0x28
#define ESR_EC_AARCH64_FP	0x2c
#define ESR_EC_SERROR		0x2f
#define ESR_EC_BREAKPT_EL0	0x30
#define ESR_EC_BREAKPT_EL1	0x31
#define ESR_EC_SOFTSTP_EL0	0x32
#define ESR_EC_SOFTSTP_EL1	0x33
#define ESR_EC_WATCHPT_EL0	0x34
#define ESR_EC_WATCHPT_EL1	0x35
#define ESR_EC_AARCH32_BKPT	0x38
#define ESR_EC_AARCH64_BRK	0x3c

/* Combined defines for DFSC and IFSC */
#define ESR_FSC_MASK		0x3f
#define ESR_FSC_TRANS_L0	0x04
#define ESR_FSC_TRANS_L1	0x05
#define ESR_FSC_TRANS_L2	0x06
#define ESR_FSC_TRANS_L3	0x07
#define ESR_FSC_ACCF_L1		0x09
#define ESR_FSC_ACCF_L2		0x0a
#define ESR_FSC_ACCF_L3		0x0b
#define ESR_FSC_PERMF_L1	0x0d
#define ESR_FSC_PERMF_L2	0x0e
#define ESR_FSC_PERMF_L3	0x0f
#define ESR_FSC_ALIGN		0x21

#ifndef ASM
static inline uint32_t read_daif(void)
{
	uint32_t val;

	asm volatile("mrs %[val], daif"
			: [val] "=r" (val));
	return val;
}

static inline void write_daif(uint32_t val)
{
	asm volatile("msr daif, %[val]"
			: : [val] "r" (val));
}

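/*
 * Illustrative sketch (not part of the original header): masking all
 * asynchronous exceptions around a critical section with the accessors
 * above. Note that DAIF_A, DAIF_I and DAIF_F are bit positions in the DAIF
 * register image read by read_daif(), while the DAIFBIT_* defines are the
 * 4-bit immediates used with "msr daifset"/"msr daifclr" in assembly. The
 * function name and callback parameter are hypothetical.
 */
static inline void example_with_exceptions_masked(void (*fn)(void))
{
	uint32_t old_daif = read_daif();

	write_daif(old_daif | DAIF_AIF);	/* mask SError, IRQ and FIQ */
	fn();
	write_daif(old_daif);			/* restore previous state */
}
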
static inline void write_mair_el1(uint64_t val)
{
	asm volatile("msr mair_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_tcr_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], tcr_el1"
			: [val] "=r" (val));
	return val;
}

static inline void write_tcr_el1(uint64_t val)
{
	asm volatile("msr tcr_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_ttbr0_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], ttbr0_el1"
			: [val] "=r" (val));
	return val;
}

static inline void write_ttbr0_el1(uint64_t val)
{
	asm volatile("msr ttbr0_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_ttbr1_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], ttbr1_el1"
			: [val] "=r" (val));
	return val;
}

static inline void write_ttbr1_el1(uint64_t val)
{
	asm volatile("msr ttbr1_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_mpidr_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], mpidr_el1"
			: [val] "=r" (val));
	return val;
}

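/*
 * Illustrative sketch (not part of the original header): extracting the
 * Aff0 affinity field (bits [7:0], typically the core number within a
 * cluster) from the MPIDR_EL1 value returned by read_mpidr_el1(). The
 * helper name is hypothetical.
 */
static inline uint32_t example_mpidr_aff0(void)
{
	return read_mpidr_el1() & 0xff;
}
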
static inline uint32_t read_contextidr_el1(void)
{
	uint32_t val;

	asm volatile("mrs %[val], contextidr_el1"
			: [val] "=r" (val));
	return val;
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrs %[val], cntpct_el0"
			: [val] "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrs %[frq], cntfrq_el0"
			: [frq] "=r" (frq));
	return frq;
}

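/*
 * Illustrative sketch (not part of the original header): relating the
 * physical counter (CNTPCT_EL0) to wall-clock time using the counter
 * frequency (CNTFRQ_EL0) read by the two accessors above. The helper name
 * is hypothetical and the naive scaling below can overflow for very large
 * tick counts; it only shows how the two registers are combined.
 */
static inline uint64_t example_cnt_to_us(void)
{
	uint64_t freq = read_cntfrq();

	if (!freq)
		return 0;	/* CNTFRQ_EL0 not programmed by firmware */
	return (read_cntpct() * 1000000) / freq;
}
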
static inline uint64_t read_esr_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], esr_el1"
			: [val] "=r" (val));
	return val;
}

static inline uint64_t read_far_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], far_el1"
			: [val] "=r" (val));
	return val;
}

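/*
 * Illustrative sketch (not part of the original header): splitting a
 * syndrome value, as read by read_esr_el1(), into the exception class and,
 * for data aborts, the fault status code, using the ESR_EC_* and ESR_FSC_*
 * defines above. The function name is hypothetical.
 */
static inline int example_esr_is_xlat_fault(uint64_t esr)
{
	uint32_t ec = (esr >> ESR_EC_SHIFT) & ESR_EC_MASK;
	uint32_t fsc = esr & ESR_FSC_MASK;

	if (ec != ESR_EC_DABT_EL0 && ec != ESR_EC_DABT_EL1)
		return 0;
	return fsc >= ESR_FSC_TRANS_L0 && fsc <= ESR_FSC_TRANS_L3;
}
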
static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb sy");
}
#endif /*ASM*/

#endif /*ARM64_H*/