/*
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef ARM32_H
#define ARM32_H

#ifndef ASM
#include <stdint.h>
#endif

#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2

#define SCR_NS		(1 << 0)
#define SCR_IRQ		(1 << 1)
#define SCR_FIQ		(1 << 2)
#define SCR_EA		(1 << 3)
#define SCR_FW		(1 << 4)
#define SCR_AW		(1 << 5)
#define SCR_NET		(1 << 6)
#define SCR_SCD		(1 << 7)
#define SCR_HCE		(1 << 8)
#define SCR_SIF		(1 << 9)

#define SCTLR_M		(1 << 0)
#define SCTLR_A		(1 << 1)
#define SCTLR_C		(1 << 2)
#define SCTLR_CP15BEN	(1 << 5)
#define SCTLR_SW	(1 << 10)
#define SCTLR_Z		(1 << 11)
#define SCTLR_I		(1 << 12)
#define SCTLR_V		(1 << 13)
#define SCTLR_RR	(1 << 14)
#define SCTLR_HA	(1 << 17)
#define SCTLR_WXN	(1 << 19)
#define SCTLR_UWXN	(1 << 20)
#define SCTLR_FI	(1 << 21)
#define SCTLR_VE	(1 << 24)
#define SCTLR_EE	(1 << 25)
#define SCTLR_NMFI	(1 << 26)
#define SCTLR_TRE	(1 << 28)
#define SCTLR_AFE	(1 << 29)
#define SCTLR_TE	(1 << 30)

#define ACTLR_SMP	(1 << 6)
#define ACTLR_DODMBS	(1 << 10)
#define ACTLR_L2RADIS	(1 << 11)
#define ACTLR_L1RADIS	(1 << 12)
#define ACTLR_L1PCTL	(1 << 13)
#define ACTLR_DDVM	(1 << 15)
#define ACTLR_DDI	(1 << 28)

#define NSACR_CP10	(1 << 10)
#define NSACR_CP11	(1 << 11)
#define NSACR_NSD32DIS	(1 << 14)
#define NSACR_NSASEDIS	(1 << 15)
#define NSACR_NS_L2ERR	(1 << 17)
#define NSACR_NS_SMP	(1 << 18)

#define CPACR_ASEDIS	(1 << 31)
#define CPACR_D32DIS	(1 << 30)
#define CPACR_CP(co_proc, access)	((access) << ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3
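
/*
 * Illustrative use (not part of this header): to give both privileged and
 * unprivileged code full access to the VFP/Advanced SIMD coprocessors
 * (CP10 and CP11) one would typically do something like:
 *
 *	write_cpacr(read_cpacr() |
 *		    CPACR_CP(10, CPACR_CP_ACCESS_FULL) |
 *		    CPACR_CP(11, CPACR_CP_ACCESS_FULL));
 *
 * followed by isb() before relying on the new access rights.
 */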

#define DACR_DOMAIN(num, perm)		((perm) << ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3
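
/*
 * Illustrative use: marking domain 0 as a client domain, so that accesses
 * are checked against the permission bits in the translation tables, could
 * look like:
 *
 *	write_dacr(DACR_DOMAIN(0, DACR_DOMAIN_PERM_CLIENT));
 */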

/*
 * TTBCR has a different register layout depending on whether LPAE is
 * enabled or not.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	(1 << 31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	(1 << 4)
#define TTBCR_PD1	(1 << 5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		(1 << 7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		(1 << 22)
#define TTBCR_EPD1		(1 << 23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3
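
/*
 * Illustrative use, assuming the long-descriptor (LPAE) format is wanted:
 * a TTBCR value selecting that format, with TTBR0 translation table walks
 * inner/outer write-back write-allocate cacheable and inner shareable,
 * could be composed as:
 *
 *	write_ttbcr(TTBCR_EAE |
 *		    (TTBCR_XRGNX_WB << TTBCR_IRGN0_SHIFT) |
 *		    (TTBCR_XRGNX_WB << TTBCR_ORGN0_SHIFT) |
 *		    (TTBCR_SHX_ISH << TTBCR_SH0_SHIFT));
 */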

#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48
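
/*
 * Illustrative use: with LPAE the ASID lives in the 64-bit TTBR, so
 * assuming TTBCR.A1 == 0 (TTBR0 holds the current ASID) it can be read as:
 *
 *	(read_ttbr0_64bit() >> TTBR_ASID_SHIFT) & TTBR_ASID_MASK
 */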

#define FSR_LPAE		(1 << 9)
#define FSR_WNR			(1 << 11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		((1 << 6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK		((1 << 10) | ((1 << 4) - 1))
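
/*
 * Illustrative use: the fault status bits are decoded differently
 * depending on whether the faulting access used the long-descriptor
 * format, which DFSR/IFSR report in the LPAE bit:
 *
 *	uint32_t dfsr = read_dfsr();
 *	uint32_t fs = (dfsr & FSR_LPAE) ? (dfsr & FSR_STATUS_MASK) :
 *					  (dfsr & FSR_FS_MASK);
 */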

#ifndef ASM
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}

static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}

static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}

static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}

static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb");
}

static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %[va], c7, c8, 1"
			: : [va] "r" (va)
	);
}

static inline uint32_t read_par(void)
{
	uint32_t par;

	asm volatile ("mrc	p15, 0, %[par], c7, c4, 0"
			: [par] "=r" (par)
	);
	return par;
}
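
/*
 * Illustrative sequence: translating a virtual address to a physical
 * address as a privileged write access. An isb() is needed to make sure
 * the result of the translation operation has reached PAR before it is
 * read back; PAR bit 0 then tells whether the translation aborted.
 *
 *	write_ats1cpw(va);
 *	isb();
 *	par = read_par();
 */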

static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}

static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}

static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs	%[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs	%[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}

static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}
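
/*
 * Illustrative use: converting a generic timer tick delta to microseconds,
 * assuming CNTFRQ has been programmed by earlier boot firmware:
 *
 *	uint64_t t0 = read_cntpct();
 *	...
 *	uint64_t us = ((read_cntpct() - t0) * 1000000) / read_cntfrq();
 */
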
#endif /*ASM*/

#endif /*ARM32_H*/