xref: /optee_os/core/arch/arm/include/arm32.h (revision 7c43c0a3b4692523e620396124c88b47c1260346)
/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2016, Linaro Limited
 * Copyright (c) 2014, STMicroelectronics International N.V.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef ARM32_H
#define ARM32_H

#include <sys/cdefs.h>
#include <stdint.h>
#include <util.h>

#define CORTEX_A7_PART_NUM		0xC07
#define CORTEX_A8_PART_NUM		0xC08
#define CORTEX_A9_PART_NUM		0xC09
#define CORTEX_A15_PART_NUM		0xC0F
#define CORTEX_A17_PART_NUM		0xC0E
#define CORTEX_A57_PART_NUM		0xD07
#define CORTEX_A72_PART_NUM		0xD08
#define CORTEX_A73_PART_NUM		0xD09
#define CORTEX_A75_PART_NUM		0xD0A

#define MIDR_PRIMARY_PART_NUM_SHIFT	4
#define MIDR_PRIMARY_PART_NUM_WIDTH	12

#define MIDR_IMPLEMENTER_SHIFT		24
#define MIDR_IMPLEMENTER_WIDTH		8
#define MIDR_IMPLEMENTER_ARM		0x41
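
/*
 * Illustrative sketch (not part of the original header): how the MIDR
 * field definitions above could be used to extract the primary part
 * number for comparison against the CORTEX_*_PART_NUM values. The
 * macro name is hypothetical; MIDR itself is read from CP15 c0, which
 * this header does not wrap.
 */
#define EXAMPLE_MIDR_PART_NUM(midr) \
	(((midr) >> MIDR_PRIMARY_PART_NUM_SHIFT) & \
	 (BIT32(MIDR_PRIMARY_PART_NUM_WIDTH) - 1))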

#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2

#define SCR_NS		BIT32(0)
#define SCR_IRQ		BIT32(1)
#define SCR_FIQ		BIT32(2)
#define SCR_EA		BIT32(3)
#define SCR_FW		BIT32(4)
#define SCR_AW		BIT32(5)
#define SCR_NET		BIT32(6)
#define SCR_SCD		BIT32(7)
#define SCR_HCE		BIT32(8)
#define SCR_SIF		BIT32(9)

#define SCTLR_M		BIT32(0)
#define SCTLR_A		BIT32(1)
#define SCTLR_C		BIT32(2)
#define SCTLR_CP15BEN	BIT32(5)
#define SCTLR_SW	BIT32(10)
#define SCTLR_Z		BIT32(11)
#define SCTLR_I		BIT32(12)
#define SCTLR_V		BIT32(13)
#define SCTLR_RR	BIT32(14)
#define SCTLR_HA	BIT32(17)
#define SCTLR_WXN	BIT32(19)
#define SCTLR_UWXN	BIT32(20)
#define SCTLR_FI	BIT32(21)
#define SCTLR_VE	BIT32(24)
#define SCTLR_EE	BIT32(25)
#define SCTLR_NMFI	BIT32(26)
#define SCTLR_TRE	BIT32(28)
#define SCTLR_AFE	BIT32(29)
#define SCTLR_TE	BIT32(30)

#define ACTLR_SMP	BIT32(6)
#define ACTLR_DODMBS	BIT32(10)
#define ACTLR_L2RADIS	BIT32(11)
#define ACTLR_L1RADIS	BIT32(12)
#define ACTLR_L1PCTL	BIT32(13)
#define ACTLR_DDVM	BIT32(15)
#define ACTLR_DDI	BIT32(28)

#define NSACR_CP10	BIT32(10)
#define NSACR_CP11	BIT32(11)
#define NSACR_NSD32DIS	BIT32(14)
#define NSACR_NSASEDIS	BIT32(15)
#define NSACR_NS_L2ERR	BIT32(17)
#define NSACR_NS_SMP	BIT32(18)

#define CPACR_ASEDIS	BIT32(31)
#define CPACR_D32DIS	BIT32(30)
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3
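
/*
 * Illustrative sketch (not part of the original header): a CPACR value
 * giving PL0 and PL1 full access to CP10 and CP11 (VFP/Advanced SIMD).
 * Such a value would typically be written with write_cpacr() below,
 * followed by an isb(). The macro name is hypothetical.
 */
#define EXAMPLE_CPACR_FULL_CP10_CP11 \
	(CPACR_CP(10, CPACR_CP_ACCESS_FULL) | \
	 CPACR_CP(11, CPACR_CP_ACCESS_FULL))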


#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3
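
/*
 * Illustrative sketch (not part of the original header): a DACR value
 * that checks domain 0 accesses against the translation table
 * permission bits and leaves every other domain with no access. The
 * macro name is hypothetical.
 */
#define EXAMPLE_DACR_DOMAIN0_CLIENT \
	DACR_DOMAIN(0, DACR_DOMAIN_PERM_CLIENT)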

#define PAR_F			BIT32(0)
#define PAR_SS			BIT32(1)
#define PAR_LPAE		BIT32(11)
#define PAR_PA_SHIFT		12
#define PAR32_PA_MASK		(BIT32(20) - 1)
#define PAR64_PA_MASK		(BIT64(28) - 1)

/*
 * TTBCR has a different register layout depending on whether LPAE is
 * enabled.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	BIT32(4)
#define TTBCR_PD1	BIT32(5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		BIT32(7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		BIT32(22)
#define TTBCR_EPD1		BIT32(23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3
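
/*
 * Illustrative sketch (not part of the original header): a TTBCR value
 * for the TTBCR.EAE == 1 (LPAE) layout above, using TTBR0 only with
 * Write-Back Write-Allocate cacheable, Inner Shareable translation
 * table walks and TTBR1 walks disabled. The macro name and the T0SZ
 * value of 0 (a full 32-bit input address range) are hypothetical.
 */
#define EXAMPLE_TTBCR_LPAE_TTBR0_WB_ISH \
	(TTBCR_EAE | \
	 SHIFT_U32(0, TTBCR_T0SZ_SHIFT) | \
	 SHIFT_U32(TTBCR_XRGNX_WB, TTBCR_IRGN0_SHIFT) | \
	 SHIFT_U32(TTBCR_XRGNX_WB, TTBCR_ORGN0_SHIFT) | \
	 SHIFT_U32(TTBCR_SHX_ISH, TTBCR_SH0_SHIFT) | \
	 TTBCR_EPD1)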

#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48


#define FSR_LPAE		BIT32(9)
#define FSR_WNR			BIT32(11)

/* Valid if FSR.LPAE is 1 */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0 */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))

#ifndef ASM
static inline uint32_t read_mpidr(void)
{
	uint32_t mpidr;

	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
			: [mpidr] "=r" (mpidr)
	);

	return mpidr;
}

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr;

	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
			: [sctlr] "=r" (sctlr)
	);

	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
			: : [sctlr] "r" (sctlr)
	);
}

static inline uint32_t read_cpacr(void)
{
	uint32_t cpacr;

	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
			: [cpacr] "=r" (cpacr)
	);

	return cpacr;
}

static inline void write_cpacr(uint32_t cpacr)
{
	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
			: : [cpacr] "r" (cpacr)
	);
}

static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}


static inline void write_ttbcr(uint32_t ttbcr)
{
	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
			: : [ttbcr] "r" (ttbcr)
	);
}

static inline uint32_t read_ttbcr(void)
{
	uint32_t ttbcr;

	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
			: [ttbcr] "=r" (ttbcr)
	);

	return ttbcr;
}

static inline void write_dacr(uint32_t dacr)
{
	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
			: : [dacr] "r" (dacr)
	);
}

static inline uint32_t read_ifar(void)
{
	uint32_t ifar;

	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
			: [ifar] "=r" (ifar)
	);

	return ifar;
}

static inline uint32_t read_dfar(void)
{
	uint32_t dfar;

	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
			: [dfar] "=r" (dfar)
	);

	return dfar;
}

static inline uint32_t read_dfsr(void)
{
	uint32_t dfsr;

	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
			: [dfsr] "=r" (dfsr)
	);

	return dfsr;
}

static inline uint32_t read_ifsr(void)
{
	uint32_t ifsr;

	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
			: [ifsr] "=r" (ifsr)
	);

	return ifsr;
}

static inline void write_scr(uint32_t scr)
{
	asm volatile ("mcr	p15, 0, %[scr], c1, c1, 0"
			: : [scr] "r" (scr)
	);
}

static inline void isb(void)
{
	asm volatile ("isb");
}

static inline void dsb(void)
{
	asm volatile ("dsb");
}

static inline void dsb_ish(void)
{
	asm volatile ("dsb ish");
}

static inline void dsb_ishst(void)
{
	asm volatile ("dsb ishst");
}

static inline void dmb(void)
{
	asm volatile ("dmb");
}

static inline void sev(void)
{
	asm volatile ("sev");
}

static inline void wfe(void)
{
	asm volatile ("wfe");
}

/* Address translate privileged write translation (current state secure PL1) */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 1" : : "r" (va));
}

/* Address translate privileged read translation (current state secure PL1) */
static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 0" : : "r" (va));
}

/* Address translate unprivileged write translation (current state secure PL0) */
static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 3" : : "r" (va));
}

/* Address translate unprivileged read translation (current state secure PL0) */
static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 2" : : "r" (va));
}

static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc	p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}

#ifdef CFG_WITH_LPAE
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc	p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif
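
/*
 * Illustrative sketch (not part of the original header): translating a
 * virtual address with the privileged-read operation above and reading
 * the result back from the 32-bit PAR, assuming the short-descriptor
 * PAR layout (PAR.LPAE == 0). Returns 0 and fills *pa on success, -1
 * if the translation aborted. The function name is hypothetical.
 */
static inline int example_va2pa_pl1_read(uint32_t va, uint32_t *pa)
{
	uint32_t par;

	write_ats1cpr(va);
	isb();
	par = read_par32();

	if (par & PAR_F)
		return -1;	/* Translation aborted */

	*pa = (par & (PAR32_PA_MASK << PAR_PA_SHIFT)) |
	      (va & (BIT32(PAR_PA_SHIFT) - 1));

	return 0;
}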

static inline void write_tlbimvaais(uint32_t mva)
{
	asm volatile ("mcr	p15, 0, %[mva], c8, c3, 3"
			: : [mva] "r" (mva)
	);
}

static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}
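
/*
 * Illustrative sketch (not part of the original header): programming
 * MAIR0 for the long-descriptor format with attribute index 0 as
 * Device memory and index 1 as Normal Write-Back Write-Allocate
 * memory. Each MAIR0 slot is 8 bits wide; the attribute encodings
 * (0x04 and 0xff) and the function name are assumptions made for this
 * example, not values provided by this header.
 */
static inline void example_write_mair0_dev_wb(void)
{
	uint32_t mair0 = SHIFT_U32(0x04, 0) | SHIFT_U32(0xff, 8);

	write_mair0(mair0);
}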

static inline uint32_t read_contextidr(void)
{
	uint32_t contextidr;

	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
			: [contextidr] "=r" (contextidr)
	);

	return contextidr;
}

static inline void write_contextidr(uint32_t contextidr)
{
	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
			: : [contextidr] "r" (contextidr)
	);
}

static inline uint32_t read_cpsr(void)
{
	uint32_t cpsr;

	asm volatile ("mrs	%[cpsr], cpsr"
			: [cpsr] "=r" (cpsr)
	);
	return cpsr;
}

static inline void write_cpsr(uint32_t cpsr)
{
	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
			: : [cpsr] "r" (cpsr)
	);
}

static inline uint32_t read_spsr(void)
{
	uint32_t spsr;

	asm volatile ("mrs	%[spsr], spsr"
			: [spsr] "=r" (spsr)
	);
	return spsr;
}

static inline uint32_t read_actlr(void)
{
	uint32_t actlr;

	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
			: [actlr] "=r" (actlr)
	);

	return actlr;
}

static inline void write_actlr(uint32_t actlr)
{
	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
			: : [actlr] "r" (actlr)
	);
}

static inline uint32_t read_nsacr(void)
{
	uint32_t nsacr;

	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
			: [nsacr] "=r" (nsacr)
	);

	return nsacr;
}

static inline void write_nsacr(uint32_t nsacr)
{
	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
			: : [nsacr] "r" (nsacr)
	);
}

static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}
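
/*
 * Illustrative sketch (not part of the original header): converting
 * the physical counter value to microseconds using the counter
 * frequency, assuming CNTFRQ has been programmed by earlier boot
 * firmware. For simplicity this ignores multiplication overflow for
 * very large counter values. The function name is hypothetical.
 */
static inline uint64_t example_cntpct_to_us(void)
{
	uint64_t ticks = read_cntpct();
	uint32_t frq = read_cntfrq();

	if (!frq)
		return 0;	/* CNTFRQ not initialized */

	return (ticks * 1000000) / frq;
}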

static inline uint32_t read_cntkctl(void)
{
	uint32_t cntkctl;

	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}

static inline void write_cntkctl(uint32_t cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}

static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}

/*
 * Register read/write functions for GICC registers accessed through
 * the system register interface
 */
static inline uint32_t read_icc_ctlr(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c12,4" : "=r" (v));
	return v;
}

static inline void write_icc_ctlr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c12,4" : : "r" (v));
}

static inline void write_icc_pmr(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c4,c6,0" : : "r" (v));
}

static inline uint32_t read_icc_iar0(void)
{
	uint32_t v;

	asm volatile ("mrc p15,0,%0,c12,c8,0" : "=r" (v));
	return v;
}

static inline void write_icc_eoir0(uint32_t v)
{
	asm volatile ("mcr p15,0,%0,c12,c8,1" : : "r" (v));
}
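
/*
 * Illustrative sketch (not part of the original header): acknowledging
 * and completing one Group 0 interrupt through the system register
 * interface functions above. The interrupt ID 1023 is the
 * architectural "no pending interrupt" value, which is not defined in
 * this header, and the function name is hypothetical.
 */
static inline void example_handle_one_group0_it(void (*handler)(uint32_t))
{
	uint32_t iar = read_icc_iar0();

	if (iar == 1023)
		return;		/* Spurious, nothing to handle */

	handler(iar);
	write_icc_eoir0(iar);
}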

/* Reads the 32-bit PMCCNTR cycle counter, zero-extended to 64 bits */
static inline uint64_t read_pmu_ccnt(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(val));
	return val;
}

static inline void wfi(void)
{
	asm volatile("wfi");
}
#endif /*ASM*/

#endif /*ARM32_H*/