xref: /optee_os/core/arch/arm/include/arm32.h (revision 8e81e2f5366a971afdd2ac47fb8529d1def5feb0)
/*
 * Copyright (c) 2016, Linaro Limited
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef ARM32_H
#define ARM32_H

#include <sys/cdefs.h>
#include <stdint.h>
#include <util.h>

#define CORTEX_A7_PART_NUM		0xC07
#define CORTEX_A9_PART_NUM		0xC09

#define MIDR_PRIMARY_PART_NUM_SHIFT	4
#define MIDR_PRIMARY_PART_NUM_WIDTH	12
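
/*
 * Illustrative sketch, not part of the original header: extracting the
 * primary part number field (MIDR bits [15:4]) from a MIDR value read
 * elsewhere, e.g. to detect a Cortex-A7 or Cortex-A9. The helper name and
 * its "midr" parameter are hypothetical.
 */
#ifndef ASM
static inline uint32_t midr_primary_part_num_example(uint32_t midr)
{
	return (midr >> MIDR_PRIMARY_PART_NUM_SHIFT) &
	       (BIT32(MIDR_PRIMARY_PART_NUM_WIDTH) - 1);
}
#endif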

#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2

#define SCR_NS		BIT32(0)
#define SCR_IRQ		BIT32(1)
#define SCR_FIQ		BIT32(2)
#define SCR_EA		BIT32(3)
#define SCR_FW		BIT32(4)
#define SCR_AW		BIT32(5)
#define SCR_NET		BIT32(6)
#define SCR_SCD		BIT32(7)
#define SCR_HCE		BIT32(8)
#define SCR_SIF		BIT32(9)

#define SCTLR_M		BIT32(0)
#define SCTLR_A		BIT32(1)
#define SCTLR_C		BIT32(2)
#define SCTLR_CP15BEN	BIT32(5)
#define SCTLR_SW	BIT32(10)
#define SCTLR_Z		BIT32(11)
#define SCTLR_I		BIT32(12)
#define SCTLR_V		BIT32(13)
#define SCTLR_RR	BIT32(14)
#define SCTLR_HA	BIT32(17)
#define SCTLR_WXN	BIT32(19)
#define SCTLR_UWXN	BIT32(20)
#define SCTLR_FI	BIT32(21)
#define SCTLR_VE	BIT32(24)
#define SCTLR_EE	BIT32(25)
#define SCTLR_NMFI	BIT32(26)
#define SCTLR_TRE	BIT32(28)
#define SCTLR_AFE	BIT32(29)
#define SCTLR_TE	BIT32(30)

#define ACTLR_SMP	BIT32(6)
#define ACTLR_DODMBS	BIT32(10)
#define ACTLR_L2RADIS	BIT32(11)
#define ACTLR_L1RADIS	BIT32(12)
#define ACTLR_L1PCTL	BIT32(13)
#define ACTLR_DDVM	BIT32(15)
#define ACTLR_DDI	BIT32(28)

#define NSACR_CP10	BIT32(10)
#define NSACR_CP11	BIT32(11)
#define NSACR_NSD32DIS	BIT32(14)
#define NSACR_NSASEDIS	BIT32(15)
#define NSACR_NS_L2ERR	BIT32(17)
#define NSACR_NS_SMP	BIT32(18)

#define CPACR_ASEDIS	BIT32(31)
#define CPACR_D32DIS	BIT32(30)
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3
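
/*
 * Illustrative sketch, not part of the original header: CPACR_CP() builds
 * the per-coprocessor access field, e.g. granting full (PL0 and PL1) access
 * to CP10/CP11 for VFP/Advanced SIMD. The helper name is hypothetical.
 */
#ifndef ASM
static inline uint32_t cpacr_enable_vfp_example(uint32_t cpacr)
{
	cpacr |= CPACR_CP(10, CPACR_CP_ACCESS_FULL);
	cpacr |= CPACR_CP(11, CPACR_CP_ACCESS_FULL);
	return cpacr;
}
#endif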


#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3
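
/*
 * Illustrative sketch, not part of the original header: updating the
 * permission field of one domain in a DACR value while leaving the other
 * domains untouched. The helper name is hypothetical.
 */
#ifndef ASM
static inline uint32_t dacr_set_domain_example(uint32_t dacr,
					       unsigned int num, uint32_t perm)
{
	dacr &= ~DACR_DOMAIN(num, 0x3);	/* Clear the two permission bits */
	dacr |= DACR_DOMAIN(num, perm);
	return dacr;
}
#endif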

#define PAR_F			BIT32(0)
#define PAR_SS			BIT32(1)
#define PAR_LPAE		BIT32(11)
#define PAR_PA_SHIFT		12
#define PAR32_PA_MASK		(BIT32(20) - 1)
#define PAR64_PA_MASK		(BIT64(28) - 1)

/*
 * TTBCR has a different register layout depending on whether LPAE is
 * enabled:
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	BIT32(4)
#define TTBCR_PD1	BIT32(5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		BIT32(7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		BIT32(22)
#define TTBCR_EPD1		BIT32(23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3
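
/*
 * Illustrative sketch, not part of the original header: composing a TTBCR
 * value for the LPAE (TTBCR.EAE == 1) layout where TTBR0 covers the whole
 * address range (T0SZ == 0) with write-back write-allocate, inner shareable
 * attributes, and TTBR1 walks are disabled. The helper name is hypothetical.
 */
#ifndef ASM
static inline uint32_t ttbcr_lpae_ttbr0_example(void)
{
	return TTBCR_EAE |
	       SHIFT_U32(0, TTBCR_T0SZ_SHIFT) |
	       SHIFT_U32(TTBCR_XRGNX_WB, TTBCR_IRGN0_SHIFT) |
	       SHIFT_U32(TTBCR_XRGNX_WB, TTBCR_ORGN0_SHIFT) |
	       SHIFT_U32(TTBCR_SHX_ISH, TTBCR_SH0_SHIFT) |
	       TTBCR_EPD1;
}
#endif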

#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48


#define FSR_LPAE		BIT32(9)
#define FSR_WNR			BIT32(11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))
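
/*
 * Illustrative sketch, not part of the original header: extracting the
 * fault status from a DFSR/IFSR value, where the field layout depends on
 * whether the long-descriptor (LPAE) format was in use for the faulting
 * translation. The helper name is hypothetical.
 */
#ifndef ASM
static inline uint32_t fsr_status_example(uint32_t fsr)
{
	if (fsr & FSR_LPAE)
		return fsr & FSR_STATUS_MASK;	/* STATUS, bits [5:0] */

	return fsr & FSR_FS_MASK;		/* FS, bits [10] and [3:0] */
}
#endif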

#ifndef ASM
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}

static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}

static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}


static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}

static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}

static inline void write_scr(uint32_t scr)
{
	asm volatile ("mcr	p15, 0, %[scr], c1, c1, 0"
			: : [scr] "r" (scr)
	);
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb");
}

static inline void dsb_ish(void)
{
	asm volatile ("dsb ish");
}

static inline void dsb_ishst(void)
{
	asm volatile ("dsb ishst");
}

static inline void dmb(void)
{
	asm volatile ("dmb");
}

static inline void sev(void)
{
	asm volatile ("sev");
}

static inline void wfe(void)
{
	asm volatile ("wfe");
}

/* ATS1CPW: stage 1 translation, current security state, PL1 write access */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 1" : : "r" (va));
}

/* ATS1CPR: stage 1 translation, current security state, PL1 read access */
static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 0" : : "r" (va));
}

/* ATS1CUW: stage 1 translation, current security state, unprivileged write */
static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 3" : : "r" (va));
}

/* ATS1CUR: stage 1 translation, current security state, unprivileged read */
static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 2" : : "r" (va));
}

static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc	p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}

#ifdef CFG_WITH_LPAE
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc	p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif
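
/*
 * Illustrative sketch, not part of the original header: translating a
 * virtual address with the privileged-read ATS1CPR operation and decoding
 * the short-descriptor PAR. Returns 0 if the translation aborted. The
 * helper name is hypothetical.
 */
static inline uint32_t ats1cpr_va_to_pa_example(uint32_t va)
{
	uint32_t par;

	write_ats1cpr(va);
	isb();			/* Ensure PAR is updated before reading it */
	par = read_par32();

	if (par & PAR_F)	/* Translation aborted */
		return 0;

	return (((par >> PAR_PA_SHIFT) & PAR32_PA_MASK) << PAR_PA_SHIFT) |
	       (va & (BIT32(PAR_PA_SHIFT) - 1));
}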

static inline void write_tlbimvaais(uint32_t mva)
{
	asm volatile ("mcr	p15, 0, %[mva], c8, c3, 3"
			: : [mva] "r" (mva)
	);
}

static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}

static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}

static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs	%[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs	%[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}

static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}
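
/*
 * Illustrative sketch, not part of the original header: converting the
 * physical counter value to milliseconds using the counter frequency. The
 * helper name is hypothetical.
 */
static inline uint64_t cntpct_to_ms_example(void)
{
	uint64_t cnt = read_cntpct();
	uint32_t frq = read_cntfrq();

	if (!frq)
		return 0;

	/* Split the division to limit the risk of 64-bit overflow */
	return (cnt / frq) * 1000 + ((cnt % frq) * 1000) / frq;
}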

static inline uint32_t read_cntkctl(void)
{
	uint32_t cntkctl;

	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}

static inline void write_cntkctl(uint32_t cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}

static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}

/* Read/write GICC registers through the system register interface */
static inline uint32_t read_icc_ctlr(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c12,4" : "=r" (v));
	return v;
}

static inline void write_icc_ctlr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c12,4" : : "r" (v));
}

static inline void write_icc_pmr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c4,c6,0" : : "r" (v));
}

static inline uint32_t read_icc_iar0(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c8,0" : "=r" (v));
	return v;
}

static inline void write_icc_eoir0(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c8,1" : : "r" (v));
}
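
/*
 * Illustrative sketch, not part of the original header: the usual
 * acknowledge/service/end-of-interrupt sequence for a Group 0 interrupt
 * when the GIC CPU interface is accessed through system registers. The
 * helper name and the "handler" callback are hypothetical.
 */
static inline void icc_handle_group0_example(void (*handler)(uint32_t iar))
{
	uint32_t iar = read_icc_iar0();	/* Acknowledge the interrupt */

	handler(iar);			/* Service it */

	write_icc_eoir0(iar);		/* Signal end of interrupt */
}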

/* Read the 32-bit PMCCNTR (Performance Monitors Cycle Count Register) */
static inline uint64_t read_pmu_ccnt(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(val));
	return val;
}

static inline void wfi(void)
{
	asm volatile("wfi");
}
#endif /*ASM*/

#endif /*ARM32_H*/