xref: /optee_os/core/arch/arm/include/arm32.h (revision 8d5160de5452fc8a007e59621c030eb3fcf21858)
1 /* SPDX-License-Identifier: BSD-2-Clause */
2 /*
3  * Copyright (c) 2016, Linaro Limited
4  * Copyright (c) 2014, STMicroelectronics International N.V.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions are met:
9  *
10  * 1. Redistributions of source code must retain the above copyright notice,
11  * this list of conditions and the following disclaimer.
12  *
13  * 2. Redistributions in binary form must reproduce the above copyright notice,
14  * this list of conditions and the following disclaimer in the documentation
15  * and/or other materials provided with the distribution.
16  *
17  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
18  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
21  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
24  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
25  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
26  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27  * POSSIBILITY OF SUCH DAMAGE.
28  */
29 
30 #ifndef ARM32_H
31 #define ARM32_H
32 
33 #include <sys/cdefs.h>
34 #include <stdint.h>
35 #include <util.h>
36 
/*
 * CPSR mode field and exception-mask bits. Plain aliases for the
 * ARM32_CPSR_* values defined in a shared header; re-exported under
 * shorter names for arm32-only code.
 */
#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

/* CPSR state and A/I/F exception-mask bits (aliases, as above) */
#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2
56 
/* SCR: Secure Configuration Register bit fields (Security Extensions) */
#define SCR_NS		BIT32(0)
#define SCR_IRQ		BIT32(1)
#define SCR_FIQ		BIT32(2)
#define SCR_EA		BIT32(3)
#define SCR_FW		BIT32(4)
#define SCR_AW		BIT32(5)
#define SCR_NET		BIT32(6)
#define SCR_SCD		BIT32(7)
#define SCR_HCE		BIT32(8)
#define SCR_SIF		BIT32(9)
67 
/* SCTLR: System Control Register bit fields */
#define SCTLR_M		BIT32(0)
#define SCTLR_A		BIT32(1)
#define SCTLR_C		BIT32(2)
#define SCTLR_CP15BEN	BIT32(5)
#define SCTLR_SW	BIT32(10)
#define SCTLR_Z		BIT32(11)
#define SCTLR_I		BIT32(12)
#define SCTLR_V		BIT32(13)
#define SCTLR_RR	BIT32(14)
#define SCTLR_HA	BIT32(17)
#define SCTLR_WXN	BIT32(19)
#define SCTLR_UWXN	BIT32(20)
#define SCTLR_FI	BIT32(21)
#define SCTLR_VE	BIT32(24)
#define SCTLR_EE	BIT32(25)
#define SCTLR_NMFI	BIT32(26)
#define SCTLR_TRE	BIT32(28)
#define SCTLR_AFE	BIT32(29)
#define SCTLR_TE	BIT32(30)
87 
/*
 * ACTLR: Auxiliary Control Register. The layout is IMPLEMENTATION
 * DEFINED, hence the per-CPU prefixes below.
 * NOTE(review): these three use BIT() while the rest of this file uses
 * BIT32() — presumably equivalent on arm32; consider unifying.
 */
/* Only valid for Cortex-A15 */
#define ACTLR_CA15_ENABLE_INVALIDATE_BTB	BIT(0)
/* Only valid for Cortex-A8 */
#define ACTLR_CA8_ENABLE_INVALIDATE_BTB		BIT(6)
/* Only valid for Cortex-A9 */
#define ACTLR_CA9_WFLZ				BIT(3)

#define ACTLR_SMP	BIT32(6)
96 
/* NSACR: Non-Secure Access Control Register bit fields */
#define NSACR_CP10	BIT32(10)
#define NSACR_CP11	BIT32(11)
#define NSACR_NSD32DIS	BIT32(14)
#define NSACR_NSASEDIS	BIT32(15)
#define NSACR_NS_L2ERR	BIT32(17)
#define NSACR_NS_SMP	BIT32(18)
103 
/* CPACR: Coprocessor Access Control Register */
#define CPACR_ASEDIS	BIT32(31)
#define CPACR_D32DIS	BIT32(30)
/* Two access-permission bits per coprocessor number */
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3


/* DACR: Domain Access Control Register — two permission bits per domain */
#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3
116 
/*
 * PAR: Physical Address Register (result of ATS1C* translations).
 * The PA masks are to be applied after shifting right by PAR_PA_SHIFT:
 * 20 bits (PA[31:12]) in the 32-bit format, 28 bits (PA[39:12]) in the
 * 64-bit (LPAE) format.
 */
#define PAR_F			BIT32(0)
#define PAR_SS			BIT32(1)
#define PAR_LPAE		BIT32(11)
#define PAR_PA_SHIFT		12
#define PAR32_PA_MASK		(BIT32(20) - 1)
#define PAR64_PA_MASK		(BIT64(28) - 1)
123 
124 /*
125  * TTBCR has different register layout if LPAE is enabled or not.
126  * TTBCR.EAE == 0 => LPAE is not enabled
127  * TTBCR.EAE == 1 => LPAE is enabled
128  */
129 #define TTBCR_EAE	BIT32(31)
130 
131 /* When TTBCR.EAE == 0 */
132 #define TTBCR_PD0	BIT32(4)
133 #define TTBCR_PD1	BIT32(5)
134 
135 /* When TTBCR.EAE == 1 */
136 #define TTBCR_T0SZ_SHIFT	0
137 #define TTBCR_EPD0		BIT32(7)
138 #define TTBCR_IRGN0_SHIFT	8
139 #define TTBCR_ORGN0_SHIFT	10
140 #define TTBCR_SH0_SHIFT		12
141 #define TTBCR_T1SZ_SHIFT	16
142 #define TTBCR_A1		BIT32(22)
143 #define TTBCR_EPD1		BIT32(23)
144 #define TTBCR_IRGN1_SHIFT	24
145 #define TTBCR_ORGN1_SHIFT	26
146 #define TTBCR_SH1_SHIFT		28
147 
148 /* Normal memory, Inner/Outer Non-cacheable */
149 #define TTBCR_XRGNX_NC		0x0
150 /* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
151 #define TTBCR_XRGNX_WB		0x1
152 /* Normal memory, Inner/Outer Write-Through Cacheable */
153 #define TTBCR_XRGNX_WT		0x2
154 /* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
155 #define TTBCR_XRGNX_WBWA	0x3
156 
157 /* Non-shareable */
158 #define TTBCR_SHX_NSH		0x0
159 /* Outer Shareable */
160 #define TTBCR_SHX_OSH		0x2
161 /* Inner Shareable */
162 #define TTBCR_SHX_ISH		0x3
163 
164 #define TTBR_ASID_MASK		0xff
165 #define TTBR_ASID_SHIFT		48
166 
167 
/* DFSR/IFSR: fault status registers */
#define FSR_LPAE		BIT32(9)
#define FSR_WNR			BIT32(11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0: FS is split as bits [10] and [3:0] */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))
176 
/* ID_PFR1 bit fields: Virtualization and Generic Timer support levels */
#define IDPFR1_VIRT_SHIFT            12
#define IDPFR1_VIRT_MASK             (0xF << IDPFR1_VIRT_SHIFT)
#define IDPFR1_GENTIMER_SHIFT        16
#define IDPFR1_GENTIMER_MASK         (0xF << IDPFR1_GENTIMER_SHIFT)
182 
183 #ifndef ASM
/* MIDR: Main ID Register — CPU implementer/architecture/part/revision */
static inline uint32_t read_midr(void)
{
	uint32_t midr;

	asm volatile ("mrc	p15, 0, %[midr], c0, c0, 0"
			: [midr] "=r" (midr)
	);

	return midr;
}

/* MPIDR: Multiprocessor Affinity Register — this CPU's affinity levels */
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}
205 
/* SCTLR: System Control Register (see SCTLR_* bit defines above) */
static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

/* Caller is responsible for any required isb() after changing SCTLR */
static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}
223 
/* CPACR: Coprocessor Access Control Register (see CPACR_* defines) */
static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}
241 
/*
 * TTBR0/TTBR1: Translation Table Base Registers.
 * The 32-bit forms (mcr/mrc) are for the short-descriptor format; the
 * 64-bit forms (mcrr/mrrc, %Q = low word, %R = high word) are for the
 * long-descriptor (LPAE) format.
 */
static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

/* 64-bit TTBR1 is selected by opc1 == 1 in the mcrr encoding */
static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}
302 
303 
/* TTBCR: Translation Table Base Control Register (see TTBCR_* defines) */
static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}
321 
/* DACR: Domain Access Control Register (see DACR_DOMAIN* defines) */
static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}
328 
/* IFAR: Instruction Fault Address Register — VA of last prefetch abort */
static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

/* DFAR: Data Fault Address Register — VA of last data abort */
static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

/* DFSR: Data Fault Status Register (decode with FSR_* defines) */
static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

/* IFSR: Instruction Fault Status Register (decode with FSR_* defines) */
static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}
372 
/*
 * SCR: Secure Configuration Register (see SCR_* defines).
 * Only accessible from secure PL1; caller issues any needed isb().
 */
static inline void write_scr(uint32_t scr)
{
	asm volatile ("mcr	p15, 0, %[scr], c1, c1, 0"
			: : [scr] "r" (scr)
	);
}
379 
380 static inline void isb(void)
381 {
382 	asm volatile ("isb");
383 }
384 
385 static inline void dsb(void)
386 {
387 	asm volatile ("dsb");
388 }
389 
390 static inline void dsb_ish(void)
391 {
392 	asm volatile ("dsb ish");
393 }
394 
395 static inline void dsb_ishst(void)
396 {
397 	asm volatile ("dsb ishst");
398 }
399 
400 static inline void dmb(void)
401 {
402 	asm volatile ("dmb");
403 }
404 
/* Send Event — wakes cores waiting in wfe() */
static inline void sev(void)
{
	asm volatile ("sev");
}

/* Wait For Event — low-power wait until an event or interrupt */
static inline void wfe(void)
{
	asm volatile ("wfe");
}
414 
/*
 * ATS1C* address translation operations (current state secure PL1).
 * Each translates the given VA; the result is read back via
 * read_par32()/read_par64(). Issue isb() between the two as required.
 */
/* Address translate privileged write translation (current state secure PL1) */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 1" : : "r" (va));
}

/* Privileged read translation */
static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 0" : : "r" (va));
}

/* Unprivileged write translation */
static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 3" : : "r" (va));
}

/* Unprivileged read translation */
static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 2" : : "r" (va));
}

/* PAR, 32-bit format (decode with PAR_* / PAR32_PA_MASK) */
static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc	p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}
443 
#ifdef CFG_WITH_LPAE
/* PAR, 64-bit LPAE format (decode with PAR_* / PAR64_PA_MASK) */
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc	p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif
453 
/* TLBIMVAAIS: invalidate TLB by MVA, all ASIDs, Inner Shareable */
static inline void write_tlbimvaais(uint32_t mva)
{
	asm volatile ("mcr	p15, 0, %[mva], c8, c3, 3"
			: : [mva] "r" (mva)
	);
}

/* MAIR0: Memory Attribute Indirection Register 0 (LPAE) */
static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}
467 
/* PRRR: Primary Region Remap Register (short-descriptor format) */
static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

/* MAIR1: Memory Attribute Indirection Register 1 (LPAE) */
static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

/* NMRR: Normal Memory Remap Register (short-descriptor format) */
static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}
498 
/* CONTEXTIDR: Context ID Register (ASID/process ID for debug and TLB) */
static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}
516 
/* CPSR: Current Program Status Register (decode with CPSR_* defines) */
static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs	%[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

/*
 * Writes the f, s, x and c CPSR fields only; execution-state bits
 * that msr cannot change are unaffected.
 */
static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

/* SPSR: Saved Program Status Register of the current exception mode */
static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs	%[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}
543 
/* ACTLR: Auxiliary Control Register — layout is CPU-specific (ACTLR_*) */
static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}
561 
/* NSACR: Non-Secure Access Control Register (see NSACR_* defines) */
static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}
579 
/* CNTPCT: 64-bit physical counter of the Generic Timer */
static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

/* CNTFRQ: counter frequency in Hz */
static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}

/* CNTKCTL: controls PL0 access to the timer/counter registers */
static inline uint32_t read_cntkctl(void)
{
	uint32_t cntkctl;

	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}

static inline void write_cntkctl(uint32_t cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}
613 
/*
 * Core register snapshots, mainly for debug/backtrace use.
 * __always_inline so the value reflects the caller's frame, not a
 * helper function's.
 */

/* Address of the current instruction (via adr) */
static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

/* r7 is the frame pointer in Thumb code */
static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}
653 
/* Register read/write functions for GICC registers by using system interface */

/* ICC_CTLR: CPU interface control */
static inline uint32_t read_icc_ctlr(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c12,4" : "=r" (v));
	return v;
}

static inline void write_icc_ctlr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c12,4" : : "r" (v));
}

/* ICC_PMR: interrupt priority mask */
static inline void write_icc_pmr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c4,c6,0" : : "r" (v));
}

/* ICC_IAR0: acknowledge a pending Group 0 interrupt, returns INTID */
static inline uint32_t read_icc_iar0(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c8,0" : "=r" (v));
	return v;
}

/* ICC_EOIR0: signal end of interrupt for a Group 0 INTID */
static inline void write_icc_eoir0(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c8,1" : : "r" (v));
}
685 
/*
 * PMCCNTR: PMU cycle counter. Note that only the 32-bit view is read
 * here (mrc), so the uint64_t return value is the 32-bit count
 * zero-extended — it wraps at 2^32 cycles.
 */
static inline uint64_t read_pmu_ccnt(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(val));
	return val;
}
693 
/* Wait For Interrupt — low-power wait until an interrupt is pending */
static inline void wfi(void)
{
	asm volatile("wfi");
}
698 #endif /*ASM*/
699 
700 #endif /*ARM32_H*/
701