/* xref: /optee_os/core/arch/arm/include/arm32.h (revision c2f5808039471d8cb9ac43385b63fb8dc6aa8ac4) */
1 /*
2  * Copyright (c) 2016, Linaro Limited
3  * Copyright (c) 2014, STMicroelectronics International N.V.
4  * All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions are met:
8  *
9  * 1. Redistributions of source code must retain the above copyright notice,
10  * this list of conditions and the following disclaimer.
11  *
12  * 2. Redistributions in binary form must reproduce the above copyright notice,
13  * this list of conditions and the following disclaimer in the documentation
14  * and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
20  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26  * POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 #ifndef ARM32_H
30 #define ARM32_H
31 
32 #include <stdint.h>
33 #include <util.h>
34 
/*
 * Short aliases for the CPSR/SPSR processor-mode and flag-bit constants.
 * The ARM32_CPSR_* values are defined elsewhere (not visible in this file).
 */
#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2
54 
/* SCR, Secure Configuration Register (ARMv7-A Security Extensions) */
#define SCR_NS		BIT32(0)	/* Non-secure state when set */
#define SCR_IRQ		BIT32(1)	/* Take IRQ exceptions to Monitor mode */
#define SCR_FIQ		BIT32(2)	/* Take FIQ exceptions to Monitor mode */
#define SCR_EA		BIT32(3)	/* Take external aborts to Monitor mode */
#define SCR_FW		BIT32(4)	/* CPSR.F writable in Non-secure state */
#define SCR_AW		BIT32(5)	/* CPSR.A writable in Non-secure state */
#define SCR_NET		BIT32(6)	/* Not Early Termination */
#define SCR_SCD		BIT32(7)	/* SMC disable in Non-secure state */
#define SCR_HCE		BIT32(8)	/* Hyp Call (HVC) enable */
#define SCR_SIF		BIT32(9)	/* Secure instruction fetch from NS memory disabled */
65 
/* SCTLR, System Control Register */
#define SCTLR_M		BIT32(0)	/* MMU enable */
#define SCTLR_A		BIT32(1)	/* Alignment check enable */
#define SCTLR_C		BIT32(2)	/* Data/unified cache enable */
#define SCTLR_CP15BEN	BIT32(5)	/* CP15 barrier operations enable */
#define SCTLR_SW	BIT32(10)	/* SWP/SWPB enable */
#define SCTLR_Z		BIT32(11)	/* Branch prediction enable */
#define SCTLR_I		BIT32(12)	/* Instruction cache enable */
#define SCTLR_V		BIT32(13)	/* High exception vectors (0xffff0000) */
#define SCTLR_RR	BIT32(14)	/* Round-robin cache replacement */
#define SCTLR_HA	BIT32(17)	/* Hardware Access flag management enable */
#define SCTLR_WXN	BIT32(19)	/* Writable regions are never executable */
#define SCTLR_UWXN	BIT32(20)	/* Unprivileged-writable never executable at PL1 */
#define SCTLR_FI	BIT32(21)	/* Fast interrupts (low latency) configuration */
#define SCTLR_VE	BIT32(24)	/* Interrupt vectors enable (implementation defined use) */
#define SCTLR_EE	BIT32(25)	/* Exception endianness (CPSR.E on exception entry) */
#define SCTLR_NMFI	BIT32(26)	/* Non-maskable FIQ (read-only) */
#define SCTLR_TRE	BIT32(28)	/* TEX remap enable */
#define SCTLR_AFE	BIT32(29)	/* Access Flag enable */
#define SCTLR_TE	BIT32(30)	/* Thumb exception entry */
85 
/*
 * ACTLR, Auxiliary Control Register.
 * NOTE: the bit layout of ACTLR is IMPLEMENTATION DEFINED; the bits below
 * match specific Cortex-A cores -- confirm against the target CPU's TRM.
 */
#define ACTLR_SMP	BIT32(6)
#define ACTLR_DODMBS	BIT32(10)
#define ACTLR_L2RADIS	BIT32(11)
#define ACTLR_L1RADIS	BIT32(12)
#define ACTLR_L1PCTL	BIT32(13)
#define ACTLR_DDVM	BIT32(15)
#define ACTLR_DDI	BIT32(28)
93 
/* NSACR, Non-Secure Access Control Register */
#define NSACR_CP10	BIT32(10)	/* Non-secure access to CP10 (VFP) allowed */
#define NSACR_CP11	BIT32(11)	/* Non-secure access to CP11 (VFP) allowed */
#define NSACR_NSD32DIS	BIT32(14)	/* Disable NS use of VFP registers D16-D31 */
#define NSACR_NSASEDIS	BIT32(15)	/* Disable NS use of Advanced SIMD */
#define NSACR_NS_L2ERR	BIT32(17)	/* Implementation-defined (L2 error behavior) */
#define NSACR_NS_SMP	BIT32(18)	/* NS writes to ACTLR.SMP allowed */
100 
/* CPACR, Coprocessor Access Control Register */
#define CPACR_ASEDIS	BIT32(31)	/* Disable Advanced SIMD functionality */
#define CPACR_D32DIS	BIT32(30)	/* Disable use of VFP registers D16-D31 */
/* Each coprocessor has a 2-bit access field at bit position (co_proc * 2) */
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3
107 
108 
/*
 * DACR, Domain Access Control Register (short-descriptor translation only).
 * Each of the 16 domains has a 2-bit permission field at (num * 2).
 */
#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0	/* Any access generates a domain fault */
#define DACR_DOMAIN_PERM_CLIENT		0x1	/* Accesses checked against permission bits */
#define DACR_DOMAIN_PERM_MANAGER	0x3	/* Accesses not checked (no permission faults) */
113 
/* PAR, Physical Address Register (result of the ATS* address translation ops) */
#define PAR_F			BIT32(0)	/* Translation aborted (fault) */
#define PAR_SS			BIT32(1)	/* Supersection (32-bit PAR format) */
#define PAR_LPAE		BIT32(11)	/* PAR is in the LPAE (64-bit) format */
#define PAR_PA_SHIFT		12
#define PAR32_PA_MASK		(BIT32(20) - 1)	/* PA[31:12] in the 32-bit format */
#define PAR64_PA_MASK		(BIT64(28) - 1)	/* PA[39:12] in the 64-bit format */
120 
/*
 * TTBCR has different register layout if LPAE is enabled or not.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	BIT32(4)	/* Disable translation table walks via TTBR0 */
#define TTBCR_PD1	BIT32(5)	/* Disable translation table walks via TTBR1 */

/*
 * When TTBCR.EAE == 1.
 * The IRGN/ORGN fields take the TTBCR_XRGNX_* cacheability values and the
 * SH fields take the TTBCR_SHX_* shareability values defined below.
 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		BIT32(7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		BIT32(22)
#define TTBCR_EPD1		BIT32(23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3
160 
/* ASID field in the 64-bit (LPAE) TTBRx: bits [55:48] */
#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48


/* DFSR/IFSR, Data/Instruction Fault Status Registers */
#define FSR_LPAE		BIT32(9)	/* FSR uses the LPAE (long-descriptor) format */
#define FSR_WNR			BIT32(11)	/* Abort caused by a write (DFSR only) */

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0: FS[4] in bit 10, FS[3:0] in bits [3:0] */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))
173 
174 #ifndef ASM
/* Read MPIDR, Multiprocessor Affinity Register (CPU/cluster identity) */
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}
185 
/* Read SCTLR, System Control Register (see the SCTLR_* bits above) */
static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}
196 
/*
 * Write SCTLR, System Control Register.
 * Callers needing the change to take effect before subsequent instructions
 * must follow with isb().
 */
static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}
203 
/* Read CPACR, Coprocessor Access Control Register */
static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}
214 
/* Write CPACR, Coprocessor Access Control Register */
static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}
221 
/* Write the 32-bit TTBR0 (short-descriptor translation table base) */
static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}
228 
/* Write the 64-bit TTBR0 (LPAE long-descriptor format) via MCRR */
static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}
235 
/* Read the 32-bit TTBR0 (short-descriptor translation table base) */
static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}
246 
/* Read the 64-bit TTBR0 (LPAE long-descriptor format) via MRRC */
static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}
257 
/* Write the 32-bit TTBR1 (short-descriptor translation table base) */
static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}
264 
/* Write the 64-bit TTBR1 (LPAE long-descriptor format, opc1 == 1) via MCRR */
static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}
271 
/* Read the 32-bit TTBR1 (short-descriptor translation table base) */
static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}
282 
283 
/* Write TTBCR, Translation Table Base Control Register (see TTBCR_* above) */
static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}
290 
/* Read TTBCR, Translation Table Base Control Register */
static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}
301 
/* Write DACR, Domain Access Control Register (see DACR_DOMAIN() above) */
static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}
308 
/* Read IFAR, Instruction Fault Address Register (VA of last prefetch abort) */
static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}
319 
/* Read DFAR, Data Fault Address Register (VA of last data abort) */
static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}
330 
/* Read DFSR, Data Fault Status Register (see the FSR_* bits above) */
static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}
341 
/* Read IFSR, Instruction Fault Status Register (see the FSR_* bits above) */
static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}
352 
353 static inline void isb(void)
354 {
355 	asm volatile ("isb");
356 }
357 
358 static inline void dsb(void)
359 {
360 	asm volatile ("dsb");
361 }
362 
363 /* Address translate privileged write translation (current state secure PL1) */
/*
 * Address translate privileged write translation (current state secure PL1).
 * The translation result is reported in PAR; retrieve it with read_par32()
 * or read_par64().
 */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 1" : : "r" (va));
}
368 
/* Read the 32-bit PAR, Physical Address Register (see the PAR_* bits above) */
static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc	p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}
376 
377 #ifdef CFG_WITH_LPAE
/* Read the 64-bit (LPAE format) PAR via MRRC */
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc	p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
385 #endif
386 
/* Write MAIR0, Memory Attribute Indirection Register 0 (attributes 0-3) */
static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}
393 
/* Write PRRR, Primary Region Remap Register */
static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}
405 
/* Write MAIR1, Memory Attribute Indirection Register 1 (attributes 4-7) */
static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}
412 
/* Write NMRR, Normal Memory Remap Register */
static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}
424 
/* Read CONTEXTIDR, Context ID Register (holds the current ASID) */
static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}
435 
/* Write CONTEXTIDR, Context ID Register */
static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}
442 
/* Read the CPSR, Current Program Status Register */
static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs	%[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}
452 
/*
 * Write the CPSR. The "fsxc" suffix selects all four byte fields
 * (flags, status, extension, control) for update.
 */
static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}
459 
/* Read the SPSR (Saved Program Status Register) of the current mode */
static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs	%[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}
469 
/* Read ACTLR, Auxiliary Control Register (bit layout is implementation defined) */
static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}
480 
/* Write ACTLR, Auxiliary Control Register (bit layout is implementation defined) */
static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}
487 
/* Read NSACR, Non-Secure Access Control Register (see the NSACR_* bits above) */
static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}
498 
/* Write NSACR, Non-Secure Access Control Register (writable from Secure state) */
static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}
505 
/* Read CNTPCT, the 64-bit physical count of the Generic Timer, via MRRC */
static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}
513 
/* Read CNTFRQ, the Generic Timer counter frequency in Hz */
static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}
521 #endif /*ASM*/
522 
523 #endif /*ARM32_H*/
524