xref: /optee_os/core/arch/arm/include/arm32.h (revision bc879b1765afacd8a2b7673236037181011cabea)
1 /* SPDX-License-Identifier: BSD-2-Clause */
2 /*
3  * Copyright (c) 2016, Linaro Limited
4  * Copyright (c) 2014, STMicroelectronics International N.V.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions are met:
9  *
10  * 1. Redistributions of source code must retain the above copyright notice,
11  * this list of conditions and the following disclaimer.
12  *
13  * 2. Redistributions in binary form must reproduce the above copyright notice,
14  * this list of conditions and the following disclaimer in the documentation
15  * and/or other materials provided with the distribution.
16  *
17  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
18  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
21  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
24  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
25  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
26  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27  * POSSIBILITY OF SUCH DAMAGE.
28  */
29 
30 #ifndef ARM32_H
31 #define ARM32_H
32 
33 #include <sys/cdefs.h>
34 #include <stdint.h>
35 #include <util.h>
36 
/*
 * CPSR (Current Program Status Register) mode values and flag bits.
 * These are thin aliases of the generic ARM32_CPSR_* definitions
 * (presumably provided by a shared header included elsewhere — not
 * visible in this file; TODO confirm the defining header).
 */
#define CPSR_MODE_MASK	ARM32_CPSR_MODE_MASK
#define CPSR_MODE_USR	ARM32_CPSR_MODE_USR
#define CPSR_MODE_FIQ	ARM32_CPSR_MODE_FIQ
#define CPSR_MODE_IRQ	ARM32_CPSR_MODE_IRQ
#define CPSR_MODE_SVC	ARM32_CPSR_MODE_SVC
#define CPSR_MODE_MON	ARM32_CPSR_MODE_MON
#define CPSR_MODE_ABT	ARM32_CPSR_MODE_ABT
#define CPSR_MODE_UND	ARM32_CPSR_MODE_UND
#define CPSR_MODE_SYS	ARM32_CPSR_MODE_SYS

/* Thumb state, FIQ/IRQ/async-abort mask bits, and IT (If-Then) state */
#define CPSR_T		ARM32_CPSR_T
#define CPSR_F_SHIFT	ARM32_CPSR_F_SHIFT
#define CPSR_F		ARM32_CPSR_F
#define CPSR_I		ARM32_CPSR_I
#define CPSR_A		ARM32_CPSR_A
#define CPSR_FIA	ARM32_CPSR_FIA
#define CPSR_IT_MASK	ARM32_CPSR_IT_MASK
#define CPSR_IT_MASK1	ARM32_CPSR_IT_MASK1
#define CPSR_IT_MASK2	ARM32_CPSR_IT_MASK2
56 
/* SCR (Secure Configuration Register) bits */
#define SCR_NS		BIT32(0)
#define SCR_IRQ		BIT32(1)
#define SCR_FIQ		BIT32(2)
#define SCR_EA		BIT32(3)
#define SCR_FW		BIT32(4)
#define SCR_AW		BIT32(5)
#define SCR_NET		BIT32(6)
#define SCR_SCD		BIT32(7)
#define SCR_HCE		BIT32(8)
#define SCR_SIF		BIT32(9)

/* SCTLR (System Control Register) bits */
#define SCTLR_M		BIT32(0)
#define SCTLR_A		BIT32(1)
#define SCTLR_C		BIT32(2)
#define SCTLR_CP15BEN	BIT32(5)
#define SCTLR_SW	BIT32(10)
#define SCTLR_Z		BIT32(11)
#define SCTLR_I		BIT32(12)
#define SCTLR_V		BIT32(13)
#define SCTLR_RR	BIT32(14)
#define SCTLR_HA	BIT32(17)
#define SCTLR_WXN	BIT32(19)
#define SCTLR_UWXN	BIT32(20)
#define SCTLR_FI	BIT32(21)
#define SCTLR_VE	BIT32(24)
#define SCTLR_EE	BIT32(25)
#define SCTLR_NMFI	BIT32(26)
#define SCTLR_TRE	BIT32(28)
#define SCTLR_AFE	BIT32(29)
#define SCTLR_TE	BIT32(30)

/* ACTLR (Auxiliary Control Register) bits — layout is IMPLEMENTATION DEFINED */
/* Only valid for Cortex-A15 */
#define ACTLR_CA15_ENABLE_INVALIDATE_BTB	BIT(0)
/* Only valid for Cortex-A8 */
#define ACTLR_CA8_ENABLE_INVALIDATE_BTB		BIT(6)

#define ACTLR_SMP	BIT32(6)

/* NSACR (Non-Secure Access Control Register) bits */
#define NSACR_CP10	BIT32(10)
#define NSACR_CP11	BIT32(11)
#define NSACR_NSD32DIS	BIT32(14)
#define NSACR_NSASEDIS	BIT32(15)
#define NSACR_NS_L2ERR	BIT32(17)
#define NSACR_NS_SMP	BIT32(18)

/* CPACR (Coprocessor Access Control Register) bits and access encodings */
#define CPACR_ASEDIS	BIT32(31)
#define CPACR_D32DIS	BIT32(30)
/* Two access-control bits per coprocessor number */
#define CPACR_CP(co_proc, access)	SHIFT_U32((access), ((co_proc) * 2))
#define CPACR_CP_ACCESS_DENIED		0x0
#define CPACR_CP_ACCESS_PL1_ONLY	0x1
#define CPACR_CP_ACCESS_FULL		0x3
108 

/* DACR (Domain Access Control Register): two permission bits per domain */
#define DACR_DOMAIN(num, perm)		SHIFT_U32((perm), ((num) * 2))
#define DACR_DOMAIN_PERM_NO_ACCESS	0x0
#define DACR_DOMAIN_PERM_CLIENT		0x1
#define DACR_DOMAIN_PERM_MANAGER	0x3

/* PAR (Physical Address Register) bits, result of an AT* translation op */
#define PAR_F			BIT32(0)
#define PAR_SS			BIT32(1)
#define PAR_LPAE		BIT32(11)
#define PAR_PA_SHIFT		12
/* PA field masks after shifting right by PAR_PA_SHIFT: 20 bits (short
 * descriptor, PA[31:12]) respectively 28 bits (LPAE, PA[39:12]) */
#define PAR32_PA_MASK		(BIT32(20) - 1)
#define PAR64_PA_MASK		(BIT64(28) - 1)
121 
/*
 * TTBCR has different register layout if LPAE is enabled or not.
 * TTBCR.EAE == 0 => LPAE is not enabled
 * TTBCR.EAE == 1 => LPAE is enabled
 */
#define TTBCR_EAE	BIT32(31)

/* When TTBCR.EAE == 0 */
#define TTBCR_PD0	BIT32(4)
#define TTBCR_PD1	BIT32(5)

/* When TTBCR.EAE == 1 */
#define TTBCR_T0SZ_SHIFT	0
#define TTBCR_EPD0		BIT32(7)
#define TTBCR_IRGN0_SHIFT	8
#define TTBCR_ORGN0_SHIFT	10
#define TTBCR_SH0_SHIFT		12
#define TTBCR_T1SZ_SHIFT	16
#define TTBCR_A1		BIT32(22)
#define TTBCR_EPD1		BIT32(23)
#define TTBCR_IRGN1_SHIFT	24
#define TTBCR_ORGN1_SHIFT	26
#define TTBCR_SH1_SHIFT		28

/*
 * IRGN/ORGN cacheability encodings.  NOTE(review): the names and the
 * comments disagree with each other for 0x1 vs 0x3 — per the comments
 * (which match the ARMv7-A encoding) 0x1 is Write-Back Write-Allocate
 * and 0x3 is Write-Back no Write-Allocate, yet the 0x3 macro is the one
 * named _WBWA.  Renaming would break callers; rely on the comments.
 */
/* Normal memory, Inner/Outer Non-cacheable */
#define TTBCR_XRGNX_NC		0x0
/* Normal memory, Inner/Outer Write-Back Write-Allocate Cacheable */
#define TTBCR_XRGNX_WB		0x1
/* Normal memory, Inner/Outer Write-Through Cacheable */
#define TTBCR_XRGNX_WT		0x2
/* Normal memory, Inner/Outer Write-Back no Write-Allocate Cacheable */
#define TTBCR_XRGNX_WBWA	0x3

/* SH0/SH1 shareability encodings */
/* Non-shareable */
#define TTBCR_SHX_NSH		0x0
/* Outer Shareable */
#define TTBCR_SHX_OSH		0x2
/* Inner Shareable */
#define TTBCR_SHX_ISH		0x3

/* ASID field in the 64-bit (LPAE) TTBR0/TTBR1 layout, bits [55:48] */
#define TTBR_ASID_MASK		0xff
#define TTBR_ASID_SHIFT		48
164 

/* DFSR/IFSR (fault status) bits common to both descriptor formats */
#define FSR_LPAE		BIT32(9)
#define FSR_WNR			BIT32(11)

/* Valid if FSR.LPAE is 1: 6-bit STATUS field */
#define FSR_STATUS_MASK		(BIT32(6) - 1)

/* Valid if FSR.LPAE is 0: FS field is split over bits [10] and [3:0] */
#define FSR_FS_MASK		(BIT32(10) | (BIT32(4) - 1))

/* ID_PFR1 bit fields */
#define IDPFR1_VIRT_SHIFT            12
#define IDPFR1_VIRT_MASK             (0xF << IDPFR1_VIRT_SHIFT)
#define IDPFR1_GENTIMER_SHIFT        16
#define IDPFR1_GENTIMER_MASK         (0xF << IDPFR1_GENTIMER_SHIFT)
180 
181 #ifndef ASM
182 static inline uint32_t read_midr(void)
183 {
184 	uint32_t midr;
185 
186 	asm volatile ("mrc	p15, 0, %[midr], c0, c0, 0"
187 			: [midr] "=r" (midr)
188 	);
189 
190 	return midr;
191 }
192 
193 static inline uint32_t read_mpidr(void)
194 {
195 	uint32_t mpidr;
196 
197 	asm volatile ("mrc	p15, 0, %[mpidr], c0, c0, 5"
198 			: [mpidr] "=r" (mpidr)
199 	);
200 
201 	return mpidr;
202 }
203 
204 static inline uint32_t read_sctlr(void)
205 {
206 	uint32_t sctlr;
207 
208 	asm volatile ("mrc	p15, 0, %[sctlr], c1, c0, 0"
209 			: [sctlr] "=r" (sctlr)
210 	);
211 
212 	return sctlr;
213 }
214 
215 static inline void write_sctlr(uint32_t sctlr)
216 {
217 	asm volatile ("mcr	p15, 0, %[sctlr], c1, c0, 0"
218 			: : [sctlr] "r" (sctlr)
219 	);
220 }
221 
222 static inline uint32_t read_cpacr(void)
223 {
224 	uint32_t cpacr;
225 
226 	asm volatile ("mrc	p15, 0, %[cpacr], c1, c0, 2"
227 			: [cpacr] "=r" (cpacr)
228 	);
229 
230 	return cpacr;
231 }
232 
233 static inline void write_cpacr(uint32_t cpacr)
234 {
235 	asm volatile ("mcr	p15, 0, %[cpacr], c1, c0, 2"
236 			: : [cpacr] "r" (cpacr)
237 	);
238 }
239 
/* Write the 32-bit (short-descriptor) view of TTBR0 */
static inline void write_ttbr0(uint32_t ttbr0)
{
	asm volatile ("mcr	p15, 0, %[ttbr0], c2, c0, 0"
			: : [ttbr0] "r" (ttbr0)
	);
}

/*
 * Write the 64-bit (LPAE) view of TTBR0.  %Q/%R select the low/high
 * word of the 64-bit operand for the MCRR register pair.
 */
static inline void write_ttbr0_64bit(uint64_t ttbr0)
{
	asm volatile ("mcrr	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: : [ttbr0] "r" (ttbr0)
	);
}

/* Read the 32-bit (short-descriptor) view of TTBR0 */
static inline uint32_t read_ttbr0(void)
{
	uint32_t ttbr0;

	asm volatile ("mrc	p15, 0, %[ttbr0], c2, c0, 0"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

/* Read the 64-bit (LPAE) view of TTBR0 */
static inline uint64_t read_ttbr0_64bit(void)
{
	uint64_t ttbr0;

	asm volatile ("mrrc	p15, 0, %Q[ttbr0], %R[ttbr0], c2"
			: [ttbr0] "=r" (ttbr0)
	);

	return ttbr0;
}

/* Write the 32-bit (short-descriptor) view of TTBR1 */
static inline void write_ttbr1(uint32_t ttbr1)
{
	asm volatile ("mcr	p15, 0, %[ttbr1], c2, c0, 1"
			: : [ttbr1] "r" (ttbr1)
	);
}

/* Write the 64-bit (LPAE) view of TTBR1 (opc1 == 1 selects TTBR1) */
static inline void write_ttbr1_64bit(uint64_t ttbr1)
{
	asm volatile ("mcrr	p15, 1, %Q[ttbr1], %R[ttbr1], c2"
			: : [ttbr1] "r" (ttbr1)
	);
}

/* Read the 32-bit (short-descriptor) view of TTBR1 */
static inline uint32_t read_ttbr1(void)
{
	uint32_t ttbr1;

	asm volatile ("mrc	p15, 0, %[ttbr1], c2, c0, 1"
			: [ttbr1] "=r" (ttbr1)
	);

	return ttbr1;
}
300 
301 
302 static inline void write_ttbcr(uint32_t ttbcr)
303 {
304 	asm volatile ("mcr	p15, 0, %[ttbcr], c2, c0, 2"
305 			: : [ttbcr] "r" (ttbcr)
306 	);
307 }
308 
309 static inline uint32_t read_ttbcr(void)
310 {
311 	uint32_t ttbcr;
312 
313 	asm volatile ("mrc	p15, 0, %[ttbcr], c2, c0, 2"
314 			: [ttbcr] "=r" (ttbcr)
315 	);
316 
317 	return ttbcr;
318 }
319 
320 static inline void write_dacr(uint32_t dacr)
321 {
322 	asm volatile ("mcr	p15, 0, %[dacr], c3, c0, 0"
323 			: : [dacr] "r" (dacr)
324 	);
325 }
326 
327 static inline uint32_t read_ifar(void)
328 {
329 	uint32_t ifar;
330 
331 	asm volatile ("mrc	p15, 0, %[ifar], c6, c0, 2"
332 			: [ifar] "=r" (ifar)
333 	);
334 
335 	return ifar;
336 }
337 
338 static inline uint32_t read_dfar(void)
339 {
340 	uint32_t dfar;
341 
342 	asm volatile ("mrc	p15, 0, %[dfar], c6, c0, 0"
343 			: [dfar] "=r" (dfar)
344 	);
345 
346 	return dfar;
347 }
348 
349 static inline uint32_t read_dfsr(void)
350 {
351 	uint32_t dfsr;
352 
353 	asm volatile ("mrc	p15, 0, %[dfsr], c5, c0, 0"
354 			: [dfsr] "=r" (dfsr)
355 	);
356 
357 	return dfsr;
358 }
359 
360 static inline uint32_t read_ifsr(void)
361 {
362 	uint32_t ifsr;
363 
364 	asm volatile ("mrc	p15, 0, %[ifsr], c5, c0, 1"
365 			: [ifsr] "=r" (ifsr)
366 	);
367 
368 	return ifsr;
369 }
370 
/* Write SCR, the Secure Configuration Register (Secure PL1 only) */
static inline void write_scr(uint32_t scr)
{
	asm volatile ("mcr	p15, 0, %[scr], c1, c1, 0"
			: : [scr] "r" (scr)
	);
}

/* Instruction Synchronization Barrier */
static inline void isb(void)
{
	asm volatile ("isb");
}

/* Data Synchronization Barrier, full system */
static inline void dsb(void)
{
	asm volatile ("dsb");
}

/* Data Synchronization Barrier, Inner Shareable domain */
static inline void dsb_ish(void)
{
	asm volatile ("dsb ish");
}

/* Data Synchronization Barrier, Inner Shareable domain, stores only */
static inline void dsb_ishst(void)
{
	asm volatile ("dsb ishst");
}

/* Data Memory Barrier, full system */
static inline void dmb(void)
{
	asm volatile ("dmb");
}

/* Send Event: wake cores waiting in WFE */
static inline void sev(void)
{
	asm volatile ("sev");
}

/* Wait For Event */
static inline void wfe(void)
{
	asm volatile ("wfe");
}
412 
/*
 * ATS1C** address translation operations (CP15 c7/c8): translate a
 * virtual address in the current (secure) state and deposit the result
 * in PAR, to be fetched with read_par32()/read_par64().
 */

/* Address translate privileged write translation (current state secure PL1) */
static inline void write_ats1cpw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 1" : : "r" (va));
}

/* Address translate privileged read translation (current state secure PL1) */
static inline void write_ats1cpr(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 0" : : "r" (va));
}

/* Address translate unprivileged write translation (current state secure PL0) */
static inline void write_ats1cpuw(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 3" : : "r" (va));
}

/* Address translate unprivileged read translation (current state secure PL0) */
static inline void write_ats1cpur(uint32_t va)
{
	asm volatile ("mcr	p15, 0, %0, c7, c8, 2" : : "r" (va));
}

/* Read the 32-bit (short-descriptor) view of PAR */
static inline uint32_t read_par32(void)
{
	uint32_t val;

	asm volatile ("mrc	p15, 0, %0, c7, c4, 0" : "=r" (val));
	return val;
}

#ifdef CFG_WITH_LPAE
/* Read the 64-bit (LPAE) view of PAR */
static inline uint64_t read_par64(void)
{
	uint64_t val;

	asm volatile ("mrrc	p15, 0, %Q0, %R0, c7" : "=r" (val));
	return val;
}
#endif
451 
/*
 * TLBIMVAAIS: invalidate TLB entries by MVA, all ASIDs, Inner
 * Shareable.
 */
static inline void write_tlbimvaais(uint32_t mva)
{
	asm volatile ("mcr	p15, 0, %[mva], c8, c3, 3"
			: : [mva] "r" (mva)
	);
}

/* Write MAIR0, Memory Attribute Indirection Register 0 (LPAE) */
static inline void write_mair0(uint32_t mair0)
{
	asm volatile ("mcr	p15, 0, %[mair0], c10, c2, 0"
			: : [mair0] "r" (mair0)
	);
}

/* Write PRRR, the Primary Region Remap Register */
static inline void write_prrr(uint32_t prrr)
{
	/*
	 * Same physical register as MAIR0.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR0 replaces the PRRR
	 */
	write_mair0(prrr);
}

/* Write MAIR1, Memory Attribute Indirection Register 1 (LPAE) */
static inline void write_mair1(uint32_t mair1)
{
	asm volatile ("mcr	p15, 0, %[mair1], c10, c2, 1"
			: : [mair1] "r" (mair1)
	);
}

/* Write NMRR, the Normal Memory Remap Register */
static inline void write_nmrr(uint32_t nmrr)
{
	/*
	 * Same physical register as MAIR1.
	 *
	 * When an implementation includes the Large Physical Address
	 * Extension, and address translation is using the Long-descriptor
	 * translation table formats, MAIR1 replaces the NMRR
	 */
	write_mair1(nmrr);
}
496 
497 static inline uint32_t read_contextidr(void)
498 {
499 	uint32_t contextidr;
500 
501 	asm volatile ("mrc	p15, 0, %[contextidr], c13, c0, 1"
502 			: [contextidr] "=r" (contextidr)
503 	);
504 
505 	return contextidr;
506 }
507 
508 static inline void write_contextidr(uint32_t contextidr)
509 {
510 	asm volatile ("mcr	p15, 0, %[contextidr], c13, c0, 1"
511 			: : [contextidr] "r" (contextidr)
512 	);
513 }
514 
515 static inline uint32_t read_cpsr(void)
516 {
517 	uint32_t cpsr;
518 
519 	asm volatile ("mrs	%[cpsr], cpsr"
520 			: [cpsr] "=r" (cpsr)
521 	);
522 	return cpsr;
523 }
524 
525 static inline void write_cpsr(uint32_t cpsr)
526 {
527 	asm volatile ("msr	cpsr_fsxc, %[cpsr]"
528 			: : [cpsr] "r" (cpsr)
529 	);
530 }
531 
532 static inline uint32_t read_spsr(void)
533 {
534 	uint32_t spsr;
535 
536 	asm volatile ("mrs	%[spsr], spsr"
537 			: [spsr] "=r" (spsr)
538 	);
539 	return spsr;
540 }
541 
542 static inline uint32_t read_actlr(void)
543 {
544 	uint32_t actlr;
545 
546 	asm volatile ("mrc	p15, 0, %[actlr], c1, c0, 1"
547 			: [actlr] "=r" (actlr)
548 	);
549 
550 	return actlr;
551 }
552 
553 static inline void write_actlr(uint32_t actlr)
554 {
555 	asm volatile ("mcr	p15, 0, %[actlr], c1, c0, 1"
556 			: : [actlr] "r" (actlr)
557 	);
558 }
559 
560 static inline uint32_t read_nsacr(void)
561 {
562 	uint32_t nsacr;
563 
564 	asm volatile ("mrc	p15, 0, %[nsacr], c1, c1, 2"
565 			: [nsacr] "=r" (nsacr)
566 	);
567 
568 	return nsacr;
569 }
570 
571 static inline void write_nsacr(uint32_t nsacr)
572 {
573 	asm volatile ("mcr	p15, 0, %[nsacr], c1, c1, 2"
574 			: : [nsacr] "r" (nsacr)
575 	);
576 }
577 
/* Read CNTPCT, the 64-bit physical counter value */
static inline uint64_t read_cntpct(void)
{
	uint64_t val;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (val));
	return val;
}

/* Read CNTFRQ, the counter frequency in Hz */
static inline uint32_t read_cntfrq(void)
{
	uint32_t frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));
	return frq;
}

/* Write CNTFRQ; only writable from the highest implemented level */
static inline void write_cntfrq(uint32_t frq)
{
	asm volatile("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}

/* Read CNTKCTL, the timer PL1 control register */
static inline uint32_t read_cntkctl(void)
{
	uint32_t cntkctl;

	asm volatile("mrc p15, 0, %0, c14, c1, 0" : "=r" (cntkctl));
	return cntkctl;
}

/* Write CNTKCTL, the timer PL1 control register */
static inline void write_cntkctl(uint32_t cntkctl)
{
	asm volatile("mcr p15, 0, %0, c14, c1, 0" : : "r" (cntkctl));
}
611 
/*
 * Core register readers.  These are __always_inline so the value
 * observed is that of the *caller's* frame, not of a helper function.
 * (__always_inline presumably comes from <sys/cdefs.h> included above —
 * TODO confirm.)
 */

/* Return the address of the current instruction (via ADR of ".") */
static __always_inline uint32_t read_pc(void)
{
	uint32_t val;

	asm volatile ("adr %0, ." : "=r" (val));
	return val;
}

/* Return the current stack pointer */
static __always_inline uint32_t read_sp(void)
{
	uint32_t val;

	asm volatile ("mov %0, sp" : "=r" (val));
	return val;
}

/* Return the current link register */
static __always_inline uint32_t read_lr(void)
{
	uint32_t val;

	asm volatile ("mov %0, lr" : "=r" (val));
	return val;
}

/* Return the current frame pointer */
static __always_inline uint32_t read_fp(void)
{
	uint32_t val;

	asm volatile ("mov %0, fp" : "=r" (val));
	return val;
}

/* Return r7 (the Thumb frame pointer / syscall number register) */
static __always_inline uint32_t read_r7(void)
{
	uint32_t val;

	asm volatile ("mov %0, r7" : "=r" (val));
	return val;
}
651 
652 /* Register read/write functions for GICC registers by using system interface */
653 static inline uint32_t read_icc_ctlr(void)
654 {
655 	uint32_t v;
656 
657 	asm volatile ("mrc p15,0,%0,c12,c12,4" : "=r" (v));
658 	return v;
659 }
660 
661 static inline void write_icc_ctlr(uint32_t v)
662 {
663 	asm volatile ("mcr p15,0,%0,c12,c12,4" : : "r" (v));
664 }
665 
666 static inline void write_icc_pmr(uint32_t v)
667 {
668 	asm volatile ("mcr p15,0,%0,c4,c6,0" : : "r" (v));
669 }
670 
671 static inline uint32_t read_icc_iar0(void)
672 {
673 	uint32_t v;
674 
675 	asm volatile ("mrc p15,0,%0,c12,c8,0" : "=r" (v));
676 	return v;
677 }
678 
679 static inline void write_icc_eoir0(uint32_t v)
680 {
681 	asm volatile ("mcr p15,0,%0,c12,c8,1" : : "r" (v));
682 }
683 
/*
 * Read PMCCNTR, the PMU cycle counter.
 *
 * NOTE(review): this is the 32-bit MRC access, so only the low 32 bits
 * of the cycle count are returned; the uint64_t return value is a
 * zero-extension of the 32-bit read and its top half is always zero.
 */
static inline uint64_t read_pmu_ccnt(void)
{
	uint32_t val;

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r"(val));
	return val;
}

/* Wait For Interrupt: idle the core until an interrupt is pending */
static inline void wfi(void)
{
	asm volatile("wfi");
}
696 #endif /*ASM*/
697 
698 #endif /*ARM32_H*/
699