/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2016, Linaro Limited
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef ARM32_H
#define ARM32_H

#include <sys/cdefs.h>
#include <stdint.h>
#include <util.h>

#define CORTEX_A7_PART_NUM		0xC07
#define CORTEX_A8_PART_NUM		0xC08
#define CORTEX_A9_PART_NUM		0xC09
#define CORTEX_A15_PART_NUM		0xC0F
#define CORTEX_A17_PART_NUM		0xC0E
#define CORTEX_A57_PART_NUM		0xD07
#define CORTEX_A72_PART_NUM		0xD08
#define CORTEX_A73_PART_NUM		0xD09
#define CORTEX_A75_PART_NUM		0xD0A

#define MIDR_PRIMARY_PART_NUM_SHIFT	4
#define MIDR_PRIMARY_PART_NUM_WIDTH	12

#define MIDR_IMPLEMENTER_SHIFT		24
#define MIDR_IMPLEMENTER_WIDTH		8
#define MIDR_IMPLEMENTER_ARM		0x41

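/*
 * Illustrative only: how the MIDR fields above are typically consumed.
 * This header does not provide a MIDR accessor; read_midr() below is a
 * hypothetical helper (MRC p15, 0, <Rt>, c0, c0, 0):
 *
 *	uint32_t midr = read_midr();
 *	uint32_t impl = (midr >> MIDR_IMPLEMENTER_SHIFT) &
 *			(BIT32(MIDR_IMPLEMENTER_WIDTH) - 1);
 *	uint32_t part = (midr >> MIDR_PRIMARY_PART_NUM_SHIFT) &
 *			(BIT32(MIDR_PRIMARY_PART_NUM_WIDTH) - 1);
 *
 *	if (impl == MIDR_IMPLEMENTER_ARM && part == CORTEX_A15_PART_NUM)
 *		... Cortex-A15 specific handling ...
 */
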
#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2

#define SCR_NS		BIT32(0)
#define SCR_IRQ		BIT32(1)
#define SCR_FIQ		BIT32(2)
#define SCR_EA		BIT32(3)
#define SCR_FW		BIT32(4)
#define SCR_AW		BIT32(5)
#define SCR_NET		BIT32(6)
#define SCR_SCD		BIT32(7)
#define SCR_HCE		BIT32(8)
#define SCR_SIF		BIT32(9)

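/*
 * Illustrative only, assuming the write_scr() accessor defined further
 * down in this header: a secure monitor could switch to the non-secure
 * state with FIQs routed to monitor mode by writing
 *
 *	write_scr(SCR_NS | SCR_FIQ);
 *
 * The exact SCR configuration is platform and design specific.
 */
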
#define SCTLR_M		BIT32(0)
#define SCTLR_A		BIT32(1)
#define SCTLR_C		BIT32(2)
#define SCTLR_CP15BEN	BIT32(5)
#define SCTLR_SW	BIT32(10)
#define SCTLR_Z		BIT32(11)
#define SCTLR_I		BIT32(12)
#define SCTLR_V		BIT32(13)
#define SCTLR_RR	BIT32(14)
#define SCTLR_HA	BIT32(17)
#define SCTLR_WXN	BIT32(19)
#define SCTLR_UWXN	BIT32(20)
#define SCTLR_FI	BIT32(21)
#define SCTLR_VE	BIT32(24)
#define SCTLR_EE	BIT32(25)
#define SCTLR_NMFI	BIT32(27)
#define SCTLR_TRE	BIT32(28)
#define SCTLR_AFE	BIT32(29)
#define SCTLR_TE	BIT32(30)

/* Only valid for Cortex-A15 */
#define ACTLR_CA15_ENABLE_INVALIDATE_BTB	BIT(0)

#define ACTLR_SMP	BIT32(6)

#define NSACR_CP10	BIT32(10)
#define NSACR_CP11	BIT32(11)
#define NSACR_NSD32DIS	BIT32(14)
#define NSACR_NSASEDIS	BIT32(15)
#define NSACR_NS_L2ERR	BIT32(17)
#define NSACR_NS_SMP	BIT32(18)

#define CPACR_ASEDIS	BIT32(31)
#define CPACR_D32DIS	BIT32(30)
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3

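/*
 * Illustrative only: a typical sequence for making VFP/Advanced SIMD
 * (CP10/CP11) usable, based on the read/write accessors defined further
 * down in this header. Grant the non-secure state access via NSACR,
 * then open the coprocessors up in CPACR:
 *
 *	write_nsacr(read_nsacr() | NSACR_CP10 | NSACR_CP11);
 *	write_cpacr(read_cpacr() |
 *		    CPACR_CP(10, CPACR_CP_ACCESS_FULL) |
 *		    CPACR_CP(11, CPACR_CP_ACCESS_FULL));
 *
 * The exact policy (for instance PL1-only access) is platform specific.
 */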

#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3

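/*
 * Illustrative only: mark domain 0 as a client domain, so that accesses
 * are checked against the translation table permission bits, and leave
 * all other domains with no access. Uses the write_dacr() accessor
 * defined further down:
 *
 *	write_dacr(DACR_DOMAIN(0, DACR_DOMAIN_PERM_CLIENT));
 */
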
#define PAR_F			BIT32(0)
#define PAR_SS			BIT32(1)
#define PAR_LPAE		BIT32(11)
#define PAR_PA_SHIFT		12
#define PAR32_PA_MASK		(BIT32(20) - 1)
#define PAR64_PA_MASK		(BIT64(28) - 1)

/*
 * TTBCR has a different register layout depending on whether LPAE is
 * enabled.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	BIT32(4)
#define TTBCR_PD1	BIT32(5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		BIT32(7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		BIT32(22)
#define TTBCR_EPD1		BIT32(23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3

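/*
 * Illustrative only: composing a long-descriptor (LPAE) TTBCR value with
 * TTBR0 covering the full address space (T0SZ = 0), inner/outer
 * write-back write-allocate cacheable walks, inner shareable, and
 * TTBR1 walks disabled. Written with write_ttbcr() defined further down:
 *
 *	uint32_t ttbcr = TTBCR_EAE |
 *			 SHIFT_U32(TTBCR_XRGNX_WB, TTBCR_IRGN0_SHIFT) |
 *			 SHIFT_U32(TTBCR_XRGNX_WB, TTBCR_ORGN0_SHIFT) |
 *			 SHIFT_U32(TTBCR_SHX_ISH, TTBCR_SH0_SHIFT) |
 *			 TTBCR_EPD1;
 *
 *	write_ttbcr(ttbcr);
 */
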
#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48

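/*
 * Illustrative only: with LPAE enabled, the 64-bit TTBR0 carries the ASID
 * in bits [55:48]. A caller-provided table base (table_pa) and ASID could
 * be combined as follows, using write_ttbr0_64bit() defined further down:
 *
 *	uint64_t ttbr0 = table_pa |
 *			 ((uint64_t)(asid & TTBR_ASID_MASK) <<
 *			  TTBR_ASID_SHIFT);
 *
 *	write_ttbr0_64bit(ttbr0);
 */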

#define FSR_LPAE		BIT32(9)
#define FSR_WNR			BIT32(11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))

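/*
 * Illustrative only: decoding a data abort with the accessors defined
 * below. The status field layout depends on whether the long-descriptor
 * (LPAE) format is in use:
 *
 *	uint32_t dfsr = read_dfsr();
 *	uint32_t status;
 *	int write_fault = !!(dfsr & FSR_WNR);
 *
 *	if (dfsr & FSR_LPAE)
 *		status = dfsr & FSR_STATUS_MASK;
 *	else
 *		status = dfsr & FSR_FS_MASK;
 */
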
#ifndef ASM
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}

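/*
 * Illustrative only: SCTLR is normally updated with a read-modify-write
 * followed by a context synchronization event, for instance to enable
 * the instruction cache:
 *
 *	write_sctlr(read_sctlr() | SCTLR_I);
 *	isb();
 *
 * isb() is defined further down in this header.
 */
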
static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}

static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}


static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}

static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}

static inline void write_scr(uint32_t scr)
{
	asm volatile ("mcr	p15, 0, %[scr], c1, c1, 0"
			: : [scr] "r" (scr)
	);
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb");
}

static inline void dsb_ish(void)
{
	asm volatile ("dsb ish");
}

static inline void dsb_ishst(void)
{
	asm volatile ("dsb ishst");
}

static inline void dmb(void)
{
	asm volatile ("dmb");
}

static inline void sev(void)
{
	asm volatile ("sev");
}

static inline void wfe(void)
{
	asm volatile ("wfe");
}

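/*
 * Illustrative only: a common producer/consumer wake-up pattern built on
 * the barrier and event primitives above. The producer makes its store
 * visible before signalling, the consumer sleeps until an event arrives
 * and then re-checks the condition. flag is a caller-provided pointer to
 * a shared volatile word:
 *
 *	Producer:
 *		*flag = 1;
 *		dsb_ishst();
 *		sev();
 *
 *	Consumer:
 *		while (!*flag)
 *			wfe();
 */
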
/* Address translate privileged write translation (current state secure PL1) */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 1" : : "r" (va));
}

static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 0" : : "r" (va));
}

static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 3" : : "r" (va));
}

static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 2" : : "r" (va));
}

static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc	p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}

#ifdef CFG_WITH_LPAE
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc	p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif

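/*
 * Illustrative only: probing the translation of a virtual address with
 * the ATS1CP* operations above and the PAR bit fields defined earlier.
 * Sketch for the short-descriptor (non-LPAE) PAR format, with va, pa and
 * the failure handling provided by the caller:
 *
 *	uint32_t par;
 *
 *	write_ats1cpr(va);
 *	isb();
 *	par = read_par32();
 *	if (par & PAR_F)
 *		... translation failed ...
 *	pa = (par & (PAR32_PA_MASK << PAR_PA_SHIFT)) |
 *	     (va & ~(PAR32_PA_MASK << PAR_PA_SHIFT));
 *
 * With LPAE, the 64-bit PAR returned by read_par64() and PAR64_PA_MASK
 * apply instead.
 */
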
static inline void write_tlbimvaais(uint32_t mva)
{
	asm volatile ("mcr	p15, 0, %[mva], c8, c3, 3"
			: : [mva] "r" (mva)
	);
}

static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}

static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}

static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs	%[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs	%[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}

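/*
 * Illustrative only: temporarily masking asynchronous aborts, IRQs and
 * FIQs around a critical section with the CPSR accessors above:
 *
 *	uint32_t old = read_cpsr();
 *
 *	write_cpsr(old | CPSR_FIA);
 *	... critical section ...
 *	write_cpsr(old);
 */
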
static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}

static inline uint32_t read_cntkctl(void)
{
	uint32_t cntkctl;

	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}

static inline void write_cntkctl(uint32_t cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}

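/*
 * Illustrative only: measuring a duration in microseconds with the
 * physical counter accessors above. CNTFRQ holds the counter frequency
 * in Hz, as programmed by the platform firmware:
 *
 *	uint64_t t0 = read_cntpct();
 *	... work being measured ...
 *	uint64_t ticks = read_cntpct() - t0;
 *	uint64_t us = (ticks * 1000000) / read_cntfrq();
 */
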
static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}

/*
 * Read/write functions for the GIC CPU interface (ICC) registers,
 * accessed via the system register interface.
 */
static inline uint32_t read_icc_ctlr(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c12,4" : "=r" (v));
	return v;
}

static inline void write_icc_ctlr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c12,4" : : "r" (v));
}

static inline void write_icc_pmr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c4,c6,0" : : "r" (v));
}

static inline uint32_t read_icc_iar0(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c8,0" : "=r" (v));
	return v;
}

static inline void write_icc_eoir0(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c8,1" : : "r" (v));
}

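/*
 * Illustrative only: a minimal Group 0 interrupt handling sequence using
 * the ICC accessors above. Reading IAR0 acknowledges the highest priority
 * pending Group 0 interrupt; writing the same INTID to EOIR0 signals
 * completion. The special INTIDs 1020-1023 indicate that there is no
 * valid interrupt to handle:
 *
 *	uint32_t id = read_icc_iar0();
 *
 *	if (id < 1020) {
 *		... handle interrupt id ...
 *		write_icc_eoir0(id);
 *	}
 */
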
static inline uint64_t read_pmu_ccnt(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(val));
	return val;
}

static inline void wfi(void)
{
	asm volatile("wfi");
}
#endif /*ASM*/

#endif /*ARM32_H*/