xref: /optee_os/core/arch/arm/include/arm32.h (revision baa999cd61495093ce1e9c43251e655b3a14da67)
1 /* SPDX-License-Identifier: BSD-2-Clause */
2 /*
3  * Copyright (c) 2016, Linaro Limited
4  * Copyright (c) 2014, STMicroelectronics International N.V.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions are met:
9  *
10  * 1. Redistributions of source code must retain the above copyright notice,
11  * this list of conditions and the following disclaimer.
12  *
13  * 2. Redistributions in binary form must reproduce the above copyright notice,
14  * this list of conditions and the following disclaimer in the documentation
15  * and/or other materials provided with the distribution.
16  *
17  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
18  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
21  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
24  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
25  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
26  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27  * POSSIBILITY OF SUCH DAMAGE.
28  */
29 
30 #ifndef ARM32_H
31 #define ARM32_H
32 
33 #include <sys/cdefs.h>
34 #include <stdint.h>
35 #include <util.h>
36 
/*
 * CPSR (Current Program Status Register) mode values and control bits.
 *
 * Thin aliases for the generic ARM32_CPSR_* constants, which are defined
 * in another header pulled in by users of this file (not defined here).
 */
#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T	/* Thumb execution state bit */
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F	/* FIQ mask bit */
#define CPSR_I		ARM32_CPSR_I	/* IRQ mask bit */
#define CPSR_A		ARM32_CPSR_A	/* Asynchronous abort mask bit */
#define CPSR_FIA	ARM32_CPSR_FIA	/* presumably F|I|A combined — see the
					   ARM32_CPSR_FIA definition */
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK	/* Thumb IT-block state bits */
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2
56 
/* SCR: Secure Configuration Register (CP15 c1, c1, 0), Security Extensions */
#define SCR_NS		BIT32(0)	/* Non-secure bit */
#define SCR_IRQ		BIT32(1)	/* IRQs taken to Monitor mode */
#define SCR_FIQ		BIT32(2)	/* FIQs taken to Monitor mode */
#define SCR_EA		BIT32(3)	/* External aborts taken to Monitor mode */
#define SCR_FW		BIT32(4)	/* CPSR.F writable from Non-secure */
#define SCR_AW		BIT32(5)	/* CPSR.A writable from Non-secure */
#define SCR_NET		BIT32(6)	/* Not Early Termination */
#define SCR_SCD		BIT32(7)	/* Secure Monitor Call disable */
#define SCR_HCE		BIT32(8)	/* Hyp Call enable */
#define SCR_SIF		BIT32(9)	/* Secure instruction fetch disable */
67 
/* SCTLR: System Control Register (CP15 c1, c0, 0) */
#define SCTLR_M		BIT32(0)	/* MMU enable */
#define SCTLR_A		BIT32(1)	/* Alignment check enable */
#define SCTLR_C		BIT32(2)	/* Data/unified cache enable */
#define SCTLR_CP15BEN	BIT32(5)	/* CP15 barrier instructions enable */
#define SCTLR_SW	BIT32(10)	/* SWP/SWPB enable */
#define SCTLR_Z		BIT32(11)	/* Branch prediction enable */
#define SCTLR_I		BIT32(12)	/* Instruction cache enable */
#define SCTLR_V		BIT32(13)	/* High exception vectors (Hivecs) */
#define SCTLR_RR	BIT32(14)	/* Round-robin cache replacement */
#define SCTLR_HA	BIT32(17)	/* Hardware Access flag enable */
#define SCTLR_WXN	BIT32(19)	/* Writable regions are XN */
#define SCTLR_UWXN	BIT32(20)	/* Unprivileged-writable regions XN at PL1 */
#define SCTLR_FI	BIT32(21)	/* Fast interrupt configuration */
#define SCTLR_VE	BIT32(24)	/* Interrupt vectors enable */
#define SCTLR_EE	BIT32(25)	/* Exception endianness */
#define SCTLR_NMFI	BIT32(26)	/* Non-maskable FIQ support */
#define SCTLR_TRE	BIT32(28)	/* TEX remap enable */
#define SCTLR_AFE	BIT32(29)	/* Access flag enable */
#define SCTLR_TE	BIT32(30)	/* Exceptions taken in Thumb state */
87 
/*
 * ACTLR: (implementation defined) Auxiliary Control Register
 * (CP15 c1, c0, 1).
 *
 * Use BIT32() for consistency with every other 32-bit register bit
 * definition in this file (was BIT(); value-identical for bit 0).
 */

/* Only valid for Cortex-A15 */
#define ACTLR_CA15_ENABLE_INVALIDATE_BTB	BIT32(0)

#define ACTLR_SMP	BIT32(6)
92 
/*
 * NSACR: Non-Secure Access Control Register (CP15 c1, c1, 2);
 * controls Non-secure access to coprocessors and CPU features.
 */
#define NSACR_CP10	BIT32(10)	/* NS access to coprocessor 10 (VFP) */
#define NSACR_CP11	BIT32(11)	/* NS access to coprocessor 11 (SIMD) */
#define NSACR_NSD32DIS	BIT32(14)	/* Disable NS use of D16-D31 */
#define NSACR_NSASEDIS	BIT32(15)	/* Disable NS Advanced SIMD */
#define NSACR_NS_L2ERR	BIT32(17)
#define NSACR_NS_SMP	BIT32(18)	/* NS may write ACTLR.SMP */

/*
 * CPACR: Coprocessor Access Control Register (CP15 c1, c0, 2).
 * CPACR_CP() positions a 2-bit access value in the field of the
 * given coprocessor number.
 */
#define CPACR_ASEDIS	BIT32(31)	/* Disable Advanced SIMD */
#define CPACR_D32DIS	BIT32(30)	/* Disable use of D16-D31 */
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3

/*
 * DACR: Domain Access Control Register (CP15 c3, c0, 0).
 * DACR_DOMAIN() positions a 2-bit permission value in the field of the
 * given domain number.
 */
#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3
112 
/* PAR: Physical Address Register (CP15 c7, c4, 0); result of ATS1* ops */
#define PAR_F			BIT32(0)	/* Translation aborted */
#define PAR_SS			BIT32(1)	/* Supersection (short format) */
#define PAR_LPAE		BIT32(11)	/* Result in LPAE (long) format */
#define PAR_PA_SHIFT		12
#define PAR32_PA_MASK		(BIT32(20) - 1)	/* PA field of 32-bit PAR */
#define PAR64_PA_MASK		(BIT64(28) - 1)	/* PA field of 64-bit PAR */
119 
/*
 * TTBCR: Translation Table Base Control Register (CP15 c2, c0, 2).
 *
 * TTBCR has different register layout if LPAE is enabled or not.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	BIT32(4)	/* Disable walks via TTBR0 */
#define TTBCR_PD1	BIT32(5)	/* Disable walks via TTBR1 */

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0	/* Size offset of TTBR0 region */
#define TTBCR_EPD0		BIT32(7)	/* Disable walks via TTBR0 */
#define TTBCR_IRGN0_SHIFT	8	/* Inner cacheability, TTBR0 walks */
#define TTBCR_ORGN0_SHIFT	10	/* Outer cacheability, TTBR0 walks */
#define TTBCR_SH0_SHIFT		12	/* Shareability, TTBR0 walks */
#define TTBCR_T1SZ_SHIFT	16	/* Size offset of TTBR1 region */
#define TTBCR_A1		BIT32(22)	/* ASID taken from TTBR1 */
#define TTBCR_EPD1		BIT32(23)	/* Disable walks via TTBR1 */
#define TTBCR_IRGN1_SHIFT	24	/* Inner cacheability, TTBR1 walks */
#define TTBCR_ORGN1_SHIFT	26	/* Outer cacheability, TTBR1 walks */
#define TTBCR_SH1_SHIFT		28	/* Shareability, TTBR1 walks */

/*
 * IRGNx/ORGNx field values.
 *
 * NOTE(review): the _WB/_WBWA macro names look swapped relative to the
 * architectural meaning of the values (0x1 is Write-Back Write-Allocate,
 * 0x3 is Write-Back no Write-Allocate). The comments below carry the
 * correct meaning; do not rename without auditing all callers.
 */
/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* SHx field values */
/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3

/* ASID field of a 64-bit (LPAE) TTBRx value */
#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48
162 
163 
/* DFSR/IFSR: Data/Instruction Fault Status Registers (CP15 c5, c0) */
#define FSR_LPAE		BIT32(9)	/* Long-descriptor fault format */
#define FSR_WNR			BIT32(11)	/* Abort caused by a write */

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))

/* ID_PFR1 bit fields */
#define IDPFR1_VIRT_SHIFT            12	/* Virtualization Extensions */
#define IDPFR1_VIRT_MASK             (0xF << IDPFR1_VIRT_SHIFT)
#define IDPFR1_GENTIMER_SHIFT        16	/* Generic Timer support */
#define IDPFR1_GENTIMER_MASK         (0xF << IDPFR1_GENTIMER_SHIFT)
178 
179 #ifndef ASM
180 static inline uint32_t read_mpidr(void)
181 {
182 	uint32_t mpidr;
183 
184 	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
185 			: [mpidr] "=r" (mpidr)
186 	);
187 
188 	return mpidr;
189 }
190 
191 static inline uint32_t read_sctlr(void)
192 {
193 	uint32_t sctlr;
194 
195 	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
196 			: [sctlr] "=r" (sctlr)
197 	);
198 
199 	return sctlr;
200 }
201 
202 static inline void write_sctlr(uint32_t sctlr)
203 {
204 	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
205 			: : [sctlr] "r" (sctlr)
206 	);
207 }
208 
/* Read CPACR, the Coprocessor Access Control Register (CP15 c1, c0, 2) */
static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

/* Write CPACR; see the CPACR_* definitions above for field encodings */
static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}
226 
227 static inline void write_ttbr0(uint32_t ttbr0)
228 {
229 	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
230 			: : [ttbr0] "r" (ttbr0)
231 	);
232 }
233 
234 static inline void write_ttbr0_64bit(uint64_t ttbr0)
235 {
236 	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
237 			: : [ttbr0] "r" (ttbr0)
238 	);
239 }
240 
241 static inline uint32_t read_ttbr0(void)
242 {
243 	uint32_t ttbr0;
244 
245 	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
246 			: [ttbr0] "=r" (ttbr0)
247 	);
248 
249 	return ttbr0;
250 }
251 
252 static inline uint64_t read_ttbr0_64bit(void)
253 {
254 	uint64_t ttbr0;
255 
256 	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
257 			: [ttbr0] "=r" (ttbr0)
258 	);
259 
260 	return ttbr0;
261 }
262 
/* Write the 32-bit (short-descriptor format) TTBR1 (CP15 c2, c0, 1) */
static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

/* Write the 64-bit (LPAE format) TTBR1; note opc1 is 1 for TTBR1 */
static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

/* Read the 32-bit (short-descriptor format) TTBR1 */
static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}


/* Write TTBCR (CP15 c2, c0, 2); see TTBCR_* definitions above */
static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

/* Read TTBCR */
static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}
306 
/* Write DACR, the Domain Access Control Register (CP15 c3, c0, 0) */
static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

/* Read IFAR, the Instruction Fault Address Register (CP15 c6, c0, 2) */
static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

/* Read DFAR, the Data Fault Address Register (CP15 c6, c0, 0) */
static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

/* Read DFSR, the Data Fault Status Register (CP15 c5, c0, 0) */
static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

/* Read IFSR, the Instruction Fault Status Register (CP15 c5, c0, 1) */
static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}
357 
358 static inline void write_scr(uint32_t scr)
359 {
360 	asm volatile ("mcr	p15, 0, %[scr], c1, c1, 0"
361 			: : [scr] "r" (scr)
362 	);
363 }
364 
/* Instruction synchronization barrier */
static inline void isb(void)
{
	asm volatile ("isb");
}

/* Full-system data synchronization barrier */
static inline void dsb(void)
{
	asm volatile ("dsb");
}

/* Data synchronization barrier, Inner Shareable domain */
static inline void dsb_ish(void)
{
	asm volatile ("dsb ish");
}

/* Data synchronization barrier, Inner Shareable domain, stores only */
static inline void dsb_ishst(void)
{
	asm volatile ("dsb ishst");
}

/* Full-system data memory barrier */
static inline void dmb(void)
{
	asm volatile ("dmb");
}

/* Send event (wakes cores waiting in wfe()) */
static inline void sev(void)
{
	asm volatile ("sev");
}

/* Wait for event */
static inline void wfe(void)
{
	asm volatile ("wfe");
}
399 
/*
 * ATS1C*: address translation operations (CP15 c7, c8). Each takes a
 * virtual address and deposits the translation result in PAR, to be
 * read back with read_par32()/read_par64().
 */

/* Address translate privileged write translation (current state secure PL1) */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 1" : : "r" (va));
}

/* Address translate privileged read translation */
static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 0" : : "r" (va));
}

/* Address translate unprivileged write translation */
static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 3" : : "r" (va));
}

/* Address translate unprivileged read translation */
static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 2" : : "r" (va));
}

/* Read the 32-bit PAR (CP15 c7, c4, 0); see PAR_* definitions above */
static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc	p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}
428 
#ifdef CFG_WITH_LPAE
/* Read the 64-bit (LPAE format) PAR via the MRRC encoding */
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc	p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif

/*
 * TLBIMVAAIS: invalidate unified TLB entries by MVA, all ASIDs,
 * Inner Shareable (CP15 c8, c3, 3)
 */
static inline void write_tlbimvaais(uint32_t mva)
{
	asm volatile ("mcr	p15, 0, %[mva], c8, c3, 3"
			: : [mva] "r" (mva)
	);
}
445 
/* Write MAIR0, Memory Attribute Indirection Register 0 (CP15 c10, c2, 0) */
static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

/* Write PRRR, the Primary Region Remap Register */
static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

/* Write MAIR1, Memory Attribute Indirection Register 1 (CP15 c10, c2, 1) */
static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

/* Write NMRR, the Normal Memory Remap Register */
static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}
483 
/* Read CONTEXTIDR, the Context ID Register (CP15 c13, c0, 1) */
static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

/* Write CONTEXTIDR; caller handles any required synchronization */
static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}
501 
502 static inline uint32_t read_cpsr(void)
503 {
504 	uint32_t cpsr;
505 
506 	asm volatile ("mrs	%[cpsr], cpsr"
507 			: [cpsr] "=r" (cpsr)
508 	);
509 	return cpsr;
510 }
511 
512 static inline void write_cpsr(uint32_t cpsr)
513 {
514 	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
515 			: : [cpsr] "r" (cpsr)
516 	);
517 }
518 
519 static inline uint32_t read_spsr(void)
520 {
521 	uint32_t spsr;
522 
523 	asm volatile ("mrs	%[spsr], spsr"
524 			: [spsr] "=r" (spsr)
525 	);
526 	return spsr;
527 }
528 
/* Read ACTLR, the Auxiliary Control Register (CP15 c1, c0, 1) */
static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

/* Write ACTLR; contents are implementation defined, see ACTLR_* above */
static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

/* Read NSACR, the Non-Secure Access Control Register (CP15 c1, c1, 2) */
static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

/* Write NSACR; see the NSACR_* bit definitions above */
static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}
564 
/* Read CNTPCT, the 64-bit physical count of the Generic Timer */
static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

/* Read CNTFRQ, the Generic Timer counter frequency in Hz */
static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

/* Write CNTFRQ; typically done once by boot firmware */
static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}

/* Read CNTKCTL, the timer PL1 control register (CP15 c14, c1, 0) */
static inline uint32_t read_cntkctl(void)
{
	uint32_t cntkctl;

	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}

/* Write CNTKCTL, controlling PL0 access to the timer/counter */
static inline void write_cntkctl(uint32_t cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}
598 
/*
 * Read the current program counter ("adr %0, ." yields the address of
 * that instruction). __always_inline so the value is taken inside the
 * caller, not inside a helper function.
 */
static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

/* Read the current stack pointer (sp/r13) */
static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

/* Read the link register (lr/r14); inlined so it is the caller's lr */
static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

/* Read the frame pointer (fp); inlined so it is the caller's frame */
static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

/* Read r7 (frame pointer in Thumb code per AAPCS convention) */
static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}
638 
/* Register read/write functions for GICC registers by using system interface */

/* Read ICC_CTLR, the interrupt controller control register */
static inline uint32_t read_icc_ctlr(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c12,4" : "=r" (v));
	return v;
}

/* Write ICC_CTLR */
static inline void write_icc_ctlr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c12,4" : : "r" (v));
}

/* Write ICC_PMR, the interrupt priority mask register */
static inline void write_icc_pmr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c4,c6,0" : : "r" (v));
}

/* Read ICC_IAR0: acknowledge a Group 0 interrupt, returns its ID */
static inline uint32_t read_icc_iar0(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c8,0" : "=r" (v));
	return v;
}

/* Write ICC_EOIR0: signal end of interrupt for a Group 0 interrupt */
static inline void write_icc_eoir0(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c8,1" : : "r" (v));
}
670 
/*
 * Read PMCCNTR, the PMU cycle counter (CP15 c9, c13, 0).
 *
 * NOTE(review): only the 32-bit counter is read here and zero-extended
 * to the uint64_t return type (matching a wider common interface,
 * presumably shared with AArch64) — confirm callers do not expect a
 * full 64-bit cycle count.
 */
static inline uint64_t read_pmu_ccnt(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(val));
	return val;
}

/* Wait for interrupt */
static inline void wfi(void)
{
	asm volatile("wfi");
}
683 #endif /*ASM*/
684 
685 #endif /*ARM32_H*/
686