/*
 * Copyright (c) 2015, Linaro Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef ARM64_H
#define ARM64_H

#ifndef ASM
#include <stdint.h>
#endif

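/*
 * SCTLR_ELx control bits used below: M enables the MMU, A enables alignment
 * checking, C enables data/unified caches, SA enables SP alignment checking
 * and I enables the instruction cache.
 */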
#define SCTLR_M		(1 << 0)
#define SCTLR_A		(1 << 1)
#define SCTLR_C		(1 << 2)
#define SCTLR_SA	(1 << 3)
#define SCTLR_I		(1 << 12)

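/*
 * ASID field in TTBRx_EL1 starts at bit 48; the 0xff mask matches the
 * 8-bit ASIDs used here.
 */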
#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48

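/*
 * CLIDR_EL1: LoUIS (Level of Unification, Inner Shareable) and LoC (Level of
 * Coherency) are 3-bit fields at bits [23:21] and [26:24] respectively.
 * CSSELR_EL1: the cache level field starts at bit 1; bit 0 selects between
 * instruction and data/unified caches.
 */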
#define CLIDR_LOUIS_SHIFT	21
#define CLIDR_LOC_SHIFT		24
#define CLIDR_FIELD_WIDTH	3

#define CSSELR_LEVEL_SHIFT	1

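/*
 * DAIFBIT_* are the 4-bit immediates used with "msr daifset/daifclr, #imm",
 * while DAIF_* are the corresponding bit positions when the DAIF register is
 * read or written as a whole (see read_daif()/write_daif() below).
 */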
#define DAIFBIT_FIQ			(1 << 0)
#define DAIFBIT_IRQ			(1 << 1)
#define DAIFBIT_ABT			(1 << 2)
#define DAIFBIT_DBG			(1 << 3)
#define DAIFBIT_ALL			(DAIFBIT_FIQ | DAIFBIT_IRQ | \
					 DAIFBIT_ABT | DAIFBIT_DBG)

#define DAIF_F_SHIFT		6
#define DAIF_F			(1 << 6)
#define DAIF_I			(1 << 7)
#define DAIF_A			(1 << 8)
#define DAIF_D			(1 << 9)
#define DAIF_AIF		(DAIF_A | DAIF_I | DAIF_F)

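/*
 * SPSR field encodings, used to compose exception return states with the
 * SPSR_64() and SPSR_32() macros below.
 */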
#define SPSR_MODE_RW_SHIFT	4
#define SPSR_MODE_RW_MASK	0x1
#define SPSR_MODE_RW_64		0x0
#define SPSR_MODE_RW_32		0x1

#define SPSR_64_MODE_SP_SHIFT	0
#define SPSR_64_MODE_SP_MASK	0x1
#define SPSR_64_MODE_SP_EL0	0x0
#define SPSR_64_MODE_SP_ELX	0x1

#define SPSR_64_MODE_EL_SHIFT	2
#define SPSR_64_MODE_EL_MASK	0x3
#define SPSR_64_MODE_EL1	0x1
#define SPSR_64_MODE_EL0	0x0

#define SPSR_64_DAIF_SHIFT	6
#define SPSR_64_DAIF_MASK	0xf

#define SPSR_32_AIF_SHIFT	6
#define SPSR_32_AIF_MASK	0x7

#define SPSR_32_E_SHIFT		9
#define SPSR_32_E_MASK		0x1
#define SPSR_32_E_LITTLE	0x0
#define SPSR_32_E_BIG		0x1

#define SPSR_32_T_SHIFT		5
#define SPSR_32_T_MASK		0x1
#define SPSR_32_T_ARM		0x0
#define SPSR_32_T_THUMB		0x1

#define SPSR_32_MODE_SHIFT	0
#define SPSR_32_MODE_MASK	0xf
#define SPSR_32_MODE_USR	0x0

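/*
 * Helpers composing an SPSR value for an exception return to AArch64 or
 * AArch32. Illustrative example: SPSR_64(SPSR_64_MODE_EL1,
 * SPSR_64_MODE_SP_ELX, DAIFBIT_ALL) builds an SPSR for a return to AArch64
 * EL1 using SP_EL1 with all of DAIF masked.
 */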
#define SPSR_64(el, sp, daif)						\
	(SPSR_MODE_RW_64 << SPSR_MODE_RW_SHIFT |			\
	((el) & SPSR_64_MODE_EL_MASK) << SPSR_64_MODE_EL_SHIFT |	\
	((sp) & SPSR_64_MODE_SP_MASK) << SPSR_64_MODE_SP_SHIFT |	\
	((daif) & SPSR_64_DAIF_MASK) << SPSR_64_DAIF_SHIFT)

#define SPSR_32(mode, isa, aif)						\
	(SPSR_MODE_RW_32 << SPSR_MODE_RW_SHIFT |			\
	SPSR_32_E_LITTLE << SPSR_32_E_SHIFT |				\
	((mode) & SPSR_32_MODE_MASK) << SPSR_32_MODE_SHIFT |		\
	((isa) & SPSR_32_T_MASK) << SPSR_32_T_SHIFT |			\
	((aif) & SPSR_32_AIF_MASK) << SPSR_32_AIF_SHIFT)

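/*
 * TCR_EL1 fields: region size (TxSZ), table walk disable (EPDx), inner/outer
 * cacheability (IRGNx/ORGNx), shareability (SHx), ASID select (A1),
 * intermediate physical address size (IPS) and TTBR1 granule size (TG1).
 */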
#define TCR_T0SZ_SHIFT		0
#define TCR_EPD0		(1 << 7)
#define TCR_IRGN0_SHIFT		8
#define TCR_ORGN0_SHIFT		10
#define TCR_SH0_SHIFT		12
#define TCR_T1SZ_SHIFT		16
#define TCR_A1			(1 << 22)
#define TCR_EPD1		(1 << 23)
#define TCR_IRGN1_SHIFT		24
#define TCR_ORGN1_SHIFT		26
#define TCR_SH1_SHIFT		28
#define TCR_EL1_IPS_SHIFT	32
#define TCR_TG1_4KB		(2ull << 30)

/* Normal memory, Inner/Outer Non-cacheable */
#define TCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TCR_XRGNX_WBWA		0x3

/* Non-shareable */
#define TCR_SHX_NSH		0x0
/* Outer Shareable */
#define TCR_SHX_OSH		0x2
/* Inner Shareable */
#define TCR_SHX_ISH		0x3

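/* Exception Class (EC): a 6-bit field at ESR_ELx[31:26], encodings below */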
#define ESR_EC_SHIFT		26
#define ESR_EC_MASK		0x3f

#define ESR_EC_UNKNOWN		0x00
#define ESR_EC_WFI		0x01
#define ESR_EC_AARCH32_CP15_32	0x03
#define ESR_EC_AARCH32_CP15_64	0x04
#define ESR_EC_AARCH32_CP14_MR	0x05
#define ESR_EC_AARCH32_CP14_LS	0x06
#define ESR_EC_FP_ASIMD		0x07
#define ESR_EC_AARCH32_CP10_ID	0x08
#define ESR_EC_AARCH32_CP14_64	0x0c
#define ESR_EC_ILLEGAL		0x0e
#define ESR_EC_AARCH32_SVC	0x11
#define ESR_EC_AARCH64_SVC	0x15
#define ESR_EC_AARCH64_SYS	0x18
#define ESR_EC_IABT_EL0		0x20
#define ESR_EC_IABT_EL1		0x21
#define ESR_EC_PC_ALIGN		0x22
#define ESR_EC_DABT_EL0		0x24
#define ESR_EC_DABT_EL1		0x25
#define ESR_EC_SP_ALIGN		0x26
#define ESR_EC_AARCH32_FP	0x28
#define ESR_EC_AARCH64_FP	0x2c
#define ESR_EC_SERROR		0x2f
#define ESR_EC_BREAKPT_EL0	0x30
#define ESR_EC_BREAKPT_EL1	0x31
#define ESR_EC_SOFTSTP_EL0	0x32
#define ESR_EC_SOFTSTP_EL1	0x33
#define ESR_EC_WATCHPT_EL0	0x34
#define ESR_EC_WATCHPT_EL1	0x35
#define ESR_EC_AARCH32_BKPT	0x38
#define ESR_EC_AARCH64_BRK	0x3c

/* Combined defines for DFSC and IFSC */
#define ESR_FSC_MASK		0x3f
#define ESR_FSC_TRANS_L0	0x04
#define ESR_FSC_TRANS_L1	0x05
#define ESR_FSC_TRANS_L2	0x06
#define ESR_FSC_TRANS_L3	0x07
#define ESR_FSC_ACCF_L1		0x09
#define ESR_FSC_ACCF_L2		0x0a
#define ESR_FSC_ACCF_L3		0x0b
#define ESR_FSC_PERMF_L1	0x0d
#define ESR_FSC_PERMF_L2	0x0e
#define ESR_FSC_PERMF_L3	0x0f
#define ESR_FSC_ALIGN		0x21

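/*
 * CPACR_EL1.FPEN (bits [21:20]) controls trapping of FP/Advanced SIMD:
 * 0x0 traps accesses from EL0 and EL1, 0x1 traps EL0 only, 0x3 traps nothing.
 */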
#define CPACR_EL1_FPEN_SHIFT	20
#define CPACR_EL1_FPEN_MASK	0x3
#define CPACR_EL1_FPEN_NONE	0x0
#define CPACR_EL1_FPEN_EL1	0x1
#define CPACR_EL1_FPEN_EL0EL1	0x3
#define CPACR_EL1_FPEN(x)	((x) >> CPACR_EL1_FPEN_SHIFT \
				      & CPACR_EL1_FPEN_MASK)

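/*
 * System register accessors and barriers. These are C only and are kept out
 * of assembly includes by the ASM guard.
 */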
#ifndef ASM
static inline uint32_t read_daif(void)
{
	uint32_t val;

	asm volatile("mrs %[val], daif"
			: [val] "=r" (val));
	return val;
}

static inline void write_daif(uint32_t val)
{
	asm volatile("msr daif, %[val]"
			: : [val] "r" (val));
}

static inline void write_mair_el1(uint64_t val)
{
	asm volatile("msr mair_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_tcr_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], tcr_el1"
			: [val] "=r" (val));
	return val;
}

static inline void write_tcr_el1(uint64_t val)
{
	asm volatile("msr tcr_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_ttbr0_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], ttbr0_el1"
			: [val] "=r" (val));
	return val;
}

static inline void write_ttbr0_el1(uint64_t val)
{
	asm volatile("msr ttbr0_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_ttbr1_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], ttbr1_el1"
			: [val] "=r" (val));
	return val;
}

static inline void write_ttbr1_el1(uint64_t val)
{
	asm volatile("msr ttbr1_el1, %[val]"
			: : [val] "r" (val));
}

static inline uint64_t read_mpidr_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], mpidr_el1"
			: [val] "=r"(val));
	return val;
}

static inline uint32_t read_contextidr_el1(void)
{
	uint32_t val;

	asm volatile("mrs %[val], contextidr_el1"
			: [val] "=r"(val));
	return val;
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrs %[val], cntpct_el0"
			: [val] "=r"(val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrs %[frq], cntfrq_el0"
			: [frq] "=r"(frq));
	return frq;
}

static inline uint64_t read_esr_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], esr_el1"
			: [val] "=r"(val));
	return val;
}

static inline uint64_t read_far_el1(void)
{
	uint64_t val;

	asm volatile("mrs %[val], far_el1"
			: [val] "=r"(val));
	return val;
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb sy");
}

static inline uint32_t read_cpacr_el1(void)
{
	uint32_t val;

	asm volatile("mrs %0, cpacr_el1" : "=r" (val));
	return val;
}

static inline void write_cpacr_el1(uint32_t val)
{
	asm volatile("msr cpacr_el1, %0" : : "r" (val));
}

static inline uint32_t read_fpcr(void)
{
	uint32_t val;

	asm volatile("mrs %0, fpcr" : "=r" (val));
	return val;
}

static inline void write_fpcr(uint32_t val)
{
	asm volatile("msr fpcr, %0" : : "r" (val));
}

static inline uint32_t read_fpsr(void)
{
	uint32_t val;

	asm volatile("mrs %0, fpsr" : "=r" (val));
	return val;
}

static inline void write_fpsr(uint32_t val)
{
	asm volatile("msr fpsr, %0" : : "r" (val));
}
#endif /*ASM*/

#endif /*ARM64_H*/