xref: /rk3399_ARM-atf/include/arch/aarch32/arch_helpers.h (revision ab23061eb0c93164259cb3ee70f288f061679620)
1 /*
2  * Copyright (c) 2016-2021, ARM Limited and Contributors. All rights reserved.
3  * Portions copyright (c) 2021-2022, ProvenRun S.A.S. All rights reserved.
4  *
5  * SPDX-License-Identifier: BSD-3-Clause
6  */
7 
8 #ifndef ARCH_HELPERS_H
9 #define ARCH_HELPERS_H
10 
11 #include <cdefs.h>
12 #include <stdbool.h>
13 #include <stdint.h>
14 #include <string.h>
15 
16 #include <arch.h>
17 
18 /**********************************************************************
19  * Macros which create inline functions to read or write CPU system
20  * registers
21  *********************************************************************/
22 
/*
 * Generate "static inline void write_<_name>(u_register_t v)", which moves
 * v into the coprocessor register selected by the coproc/opc1/CRn/CRm/opc2
 * encoding via a single MCR instruction.  The encoding arguments are
 * stringised straight into the instruction text.
 */
23 #define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
24 static inline void write_## _name(u_register_t v)			\
25 {									\
26 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
27 }
28 
/*
 * Generate "static inline u_register_t read_<_name>(void)", which reads the
 * selected coprocessor register with an MRC instruction and returns it.
 */
29 #define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
30 static inline u_register_t read_ ## _name(void)				\
31 {									\
32 	u_register_t v;							\
33 	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
34 	return v;							\
35 }
36 
37 /*
38  *  The undocumented %Q and %R extended asm operand modifiers are used to
39  *  implement the below 64 bit `mrrc` and `mcrr` instructions (they select
40  *  the two 32-bit halves of a 64-bit operand; see the GCC extended asm
41  *  documentation for ARM operand modifiers).
42  */
43 
/*
 * Generate "static inline void write64_<_name>(uint64_t v)": a 64-bit
 * coprocessor register write using MCRR, passing both halves of v.
 */
42 #define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm)		\
43 static inline void write64_## _name(uint64_t v)				\
44 {									\
45 	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
46 }
47 
/*
 * Generate "static inline uint64_t read64_<_name>(void)": a 64-bit
 * coprocessor register read using MRRC.
 */
48 #define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm)		\
49 static inline uint64_t read64_## _name(void)				\
50 {	uint64_t v;							\
51 	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
52 	return v;							\
53 }
54 
/*
 * Generate "read_<_name>()" returning the value of a banked/special register
 * readable with MRS (e.g. CPSR, SPSR).
 */
55 #define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name)			\
56 static inline u_register_t read_ ## _name(void)				\
57 {									\
58 	u_register_t v;							\
59 	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v));		\
60 	return v;							\
61 }
62 
/*
 * Generate "write_<_name>(v)" writing a special register with MSR from a
 * general-purpose register operand.
 */
63 #define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)			\
64 static inline void write_ ## _name(u_register_t v)			\
65 {									\
66 	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v));	\
67 }
68 
/*
 * As above, but the "i" constraint requires v to be a compile-time constant
 * so that MSR's immediate form can be used.
 */
69 #define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name)		\
70 static inline void write_ ## _name(const u_register_t v)		\
71 {									\
72 	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v));	\
73 }
74 
75 /* Define read function for coproc register */
/*
 * Public wrappers: callers pass a single register token (declared in arch.h)
 * which expands, via __VA_ARGS__, into the full coproc/opc1/CRn/CRm/opc2
 * (or coproc/opc1/CRm for the 64-bit variants) encoding list expected by the
 * underscore-prefixed worker macros above.
 */
75 /* Define read function for coproc register */
76 #define DEFINE_COPROCR_READ_FUNC(_name, ...) 				\
77 	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)
78 
79 /* Define write function for coproc register */
80 #define DEFINE_COPROCR_WRITE_FUNC(_name, ...) 				\
81 	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)
82 
83 /* Define read & write function for coproc register */
84 #define DEFINE_COPROCR_RW_FUNCS(_name, ...) 				\
85 	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)			\
86 	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)
87 
88 /* Define 64 bit read function for coproc register */
89 #define DEFINE_COPROCR_READ_FUNC_64(_name, ...) 			\
90 	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)
91 
92 /* Define 64 bit write function for coproc register */
93 #define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) 			\
94 	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)
95 
96 /* Define 64 bit read & write function for coproc register */
97 #define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) 				\
98 	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)		\
99 	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)
100 
101 /* Define read & write function for system register */
102 #define DEFINE_SYSREG_RW_FUNCS(_name)					\
103 	_DEFINE_SYSREG_READ_FUNC(_name, _name)				\
104 	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)
105 
106 /**********************************************************************
107  * Macros to create inline functions for tlbi operations
108  *********************************************************************/
109 
/*
 * Generate "tlbi<_op>()" issuing a TLB maintenance MCR.  The source operand
 * is ignored by the "all" style operations, so a zero is supplied.
 */
110 #define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
111 static inline void tlbi##_op(void)					\
112 {									\
113 	u_register_t v = 0;						\
114 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
115 }
116 
/*
 * Generate "bpi<_op>()" issuing a branch-predictor maintenance MCR; the
 * operand value is likewise ignored and passed as zero.
 */
117 #define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
118 static inline void bpi##_op(void)					\
119 {									\
120 	u_register_t v = 0;						\
121 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
122 }
123 
/*
 * Generate "tlbi<_op>(v)" for TLB operations that take an argument (e.g. a
 * VA or ASID, depending on the operation encoding).
 */
124 #define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
125 static inline void tlbi##_op(u_register_t v)				\
126 {									\
127 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
128 }
129 
130 /* Define function for simple TLBI operation */
131 #define DEFINE_TLBIOP_FUNC(_op, ...)					\
132 	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)
133 
134 /* Define function for TLBI operation with register parameter */
135 #define DEFINE_TLBIOP_PARAM_FUNC(_op, ...)				\
136 	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)
137 
138 /* Define function for simple BPI operation */
139 #define DEFINE_BPIOP_FUNC(_op, ...)					\
140 	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)
141 
142 /**********************************************************************
143  * Macros to create inline functions for DC operations
144  *********************************************************************/
/*
 * Generate "dc<_op>(v)" issuing a data-cache maintenance MCR whose operand
 * (typically an address, per the MVA-style operation names used below) is
 * supplied by the caller.
 */
145 #define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
146 static inline void dc##_op(u_register_t v)				\
147 {									\
148 	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
149 }
150 
151 /* Define function for DC operation with register parameter */
152 #define DEFINE_DCOP_PARAM_FUNC(_op, ...)				\
153 	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
154 
155 /**********************************************************************
156  * Macros to create inline functions for system instructions
157  *********************************************************************/
158  /* Define function for simple system instruction */
/*
 * Generate "<_op>()" emitting the bare instruction (e.g. wfi, isb).  Note:
 * per the GCC extended asm rules, an asm with no output operands is
 * implicitly volatile, so no "volatile" keyword is needed here.
 */
159 #define DEFINE_SYSOP_FUNC(_op)						\
160 static inline void _op(void)						\
161 {									\
162 	__asm__ (#_op);							\
163 }
164 
165 
166 /* Define function for system instruction with type specifier */
/*
 * Generate "<_op><_type>()" (e.g. dsbsy) with a "memory" clobber so the
 * compiler does not reorder memory accesses across the barrier.
 */
167 #define DEFINE_SYSOP_TYPE_FUNC(_op, _type)				\
168 static inline void _op ## _type(void)					\
169 {									\
170 	__asm__ (#_op " " #_type : : : "memory");			\
171 }
172 
173 /* Define function for system instruction with register parameter */
/*
 * Generate "<_op><_type>(v)" passing v as a register operand.  NOTE(review):
 * unlike the variant above, this carries no "memory" clobber — presumably
 * intentional for the operations it is used with; confirm before reuse.
 */
174 #define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type)			\
175 static inline void _op ## _type(u_register_t v)				\
176 {									\
177 	 __asm__ (#_op " " #_type ", %0" : : "r" (v));			\
178 }
179 
/*
 * Cache/MMU helpers implemented out of line (in assembly elsewhere in the
 * project; not visible in this header).  The *_dcache_range() helpers
 * operate on [addr, addr + size); dcsw_op_*() take a set/way operation type.
 */
180 void flush_dcache_range(uintptr_t addr, size_t size);
181 void clean_dcache_range(uintptr_t addr, size_t size);
182 void inv_dcache_range(uintptr_t addr, size_t size);
183 bool is_dcache_enabled(void);
184 
185 void dcsw_op_louis(u_register_t op_type);
186 void dcsw_op_all(u_register_t op_type);
187 
188 void disable_mmu_secure(void);
189 void disable_mmu_icache_secure(void);
190 
/* Wait/event hint and barrier instruction wrappers. */
191 DEFINE_SYSOP_FUNC(wfi)
192 DEFINE_SYSOP_FUNC(wfe)
193 DEFINE_SYSOP_FUNC(sev)
194 DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
195 DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
196 DEFINE_SYSOP_TYPE_FUNC(dmb, st)
197 
198 /* dmb ld is not valid for armv7/thumb machines */
199 #if ARM_ARCH_MAJOR != 7
200 DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
201 #endif
202 
203 DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
204 DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
205 DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
206 DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
207 DEFINE_SYSOP_FUNC(isb)
208 
/* Secure Monitor Call; marked __dead2, i.e. it does not return. */
209 void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
210 		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);
211 
/* MRS/MSR accessors for the SPSR and CPSR special registers. */
212 DEFINE_SYSREG_RW_FUNCS(spsr)
213 DEFINE_SYSREG_RW_FUNCS(cpsr)
214 
215 /*******************************************************************************
216  * System register accessor prototypes
217  ******************************************************************************/
/* Read-only ID/status registers. */
218 DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
219 DEFINE_COPROCR_READ_FUNC(midr, MIDR)
220 DEFINE_COPROCR_READ_FUNC(id_mmfr3, ID_MMFR3)
221 DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
222 DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
223 DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
224 DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
225 DEFINE_COPROCR_READ_FUNC(isr, ISR)
226 DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
227 DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)
228 
/*
 * Read/write accessors for security, control, translation-table and timer
 * registers.  The *_64 forms use MRRC/MCRR for 64-bit registers (e.g. the
 * long-descriptor TTBRx_64, VTTBR, CNTVOFF).
 */
229 DEFINE_COPROCR_RW_FUNCS(scr, SCR)
230 DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
231 DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
232 DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
233 DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
234 DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
235 DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
236 DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
237 DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
238 DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
239 DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
240 DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
241 DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
242 DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
243 DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
244 DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
245 DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
246 DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
247 DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
248 DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
249 DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
250 DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
251 DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
252 DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
253 DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
/* EL2 physical timer accessors (AArch64-style names kept for compatibility). */
254 DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
255 DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
256 DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)
257 
/*
 * Field extractors for a CNTP_CTL-format value: each shifts the field down
 * and masks it (shift/mask constants come from arch.h).
 */
258 #define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
259 					 CNTP_CTL_ENABLE_MASK)
260 #define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
261 					 CNTP_CTL_IMASK_MASK)
262 #define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
263 					 CNTP_CTL_ISTATUS_MASK)
264 
/* In-place setters/clearers: note these modify the lvalue argument x. */
265 #define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
266 #define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)
267 
268 #define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
269 #define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
270 
/*
 * GIC CPU interface (ICC_*) system register accessors.  Function names use
 * the AArch64-style _el<N> suffixes while mapping onto the AArch32 ICC_*
 * coprocessor encodings from arch.h.  The 64-bit SGI generation registers
 * use the MCRR-based write helpers.
 */
271 DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
272 DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
273 DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
274 DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
275 DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
276 DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
277 DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
278 DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
279 DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
280 DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
281 DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
282 DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
283 DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
284 DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
285 DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
286 DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)
287 DEFINE_COPROCR_WRITE_FUNC_64(icc_asgi1r, ICC_ASGI1R_EL1_64)
288 
/* Debug configuration and performance-monitor registers. */
289 DEFINE_COPROCR_RW_FUNCS(sdcr, SDCR)
290 DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
291 DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
292 DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)
293 
294 /*
295  * Address translation: the ats1* writers trigger an address-translation
296  * operation whose result is read back via the 64-bit PAR accessor.
297  */
297 DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
298 DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
299 DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)
300 
301 DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)
302 
303 /* AArch32 coproc registers for 32bit MMU descriptor support */
304 DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
305 DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
306 DEFINE_COPROCR_RW_FUNCS(dacr, DACR)
307 
308 /* Coproc registers for 32bit AMU (Activity Monitors) support */
309 DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
310 DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
311 DEFINE_COPROCR_RW_FUNCS(amcr, AMCR)
312 
313 DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
314 DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
315 DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
316 DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)
317 
318 /* Coproc registers for 64bit AMU support (event counters 0-3) */
319 DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
320 DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
321 DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
322 DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)
323 
324 /*
325  * TLBI operation prototypes
326  */
327 DEFINE_TLBIOP_FUNC(all, TLBIALL)
328 DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
329 DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
330 DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
331 DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
332 DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)
333 
334 /*
335  * BPI operation prototypes.
336  */
337 DEFINE_BPIOP_FUNC(allis, BPIALLIS)
338 
339 /*
340  * DC operation prototypes
341  */
342 DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
343 DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
/*
 * Under the listed Cortex-A53 errata the plain clean (dccvac) is promoted
 * to a clean+invalidate by mapping it to the DCCIMVAC encoding instead of
 * DCCMVAC.
 */
344 #if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
345 DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
346 #else
347 DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
348 #endif
349 
350 /*
351  * DynamIQ Shared Unit power management
352  */
353 DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)
354 
355 /* Previously defined accessor functions with incomplete register names  */
356 #define dsb()			dsbsy()
357 #define dmb()			dmbsy()
358 
359 /* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
360 #if ARM_ARCH_MAJOR == 7
361 #define	dmbld()			dmb()
362 #endif
363 
/* Secure state: true when the SCR.NS bit is clear. */
364 #define IS_IN_SECURE() \
365 	(GET_NS_BIT(read_scr()) == 0)
366 
/* CPU mode predicates based on the CPSR.M[4:0] field. */
367 #define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
368 #define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
369 #define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
370 #define IS_IN_EL2()	IS_IN_HYP()
371 /* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
372 #define IS_IN_EL3() \
373 	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
374 		(IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))
375 
/*
 * Map the current AArch32 execution state onto an AArch64-style exception
 * level: 3 for (secure PL1/monitor) EL3, 2 for Hyp mode, 1 otherwise.
 */
static inline unsigned int get_current_el(void)
{
	unsigned int el;

	if (IS_IN_EL3()) {
		el = 3U;
	} else if (IS_IN_EL2()) {
		el = 2U;
	} else {
		el = 1U;
	}

	return el;
}
386 
387 /* Macros for compatibility with AArch64 system registers */
/* Forward the AArch64-suffixed names onto the AArch32 accessors above. */
388 #define read_mpidr_el1()	read_mpidr()
389 
390 #define read_scr_el3()		read_scr()
391 #define write_scr_el3(_v)	write_scr(_v)
392 
393 #define read_hcr_el2()		read_hcr()
394 #define write_hcr_el2(_v)	write_hcr(_v)
395 
/*
 * NOTE(review): read_cpacr()/write_cpacr() are not generated anywhere in
 * this visible portion of the header — presumably defined elsewhere; verify.
 */
396 #define read_cpacr_el1()	read_cpacr()
397 #define write_cpacr_el1(_v)	write_cpacr(_v)
398 
399 #define read_cntfrq_el0()	read_cntfrq()
400 #define write_cntfrq_el0(_v)	write_cntfrq(_v)
401 #define read_isr_el1()		read_isr()
402 
403 #define read_cntpct_el0()	read64_cntpct()
404 
405 #define read_ctr_el0()		read_ctr()
406 
407 #define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)
408 #define write_icc_sgi1r(_v)	write64_icc_sgi1r(_v)
409 #define write_icc_asgi1r(_v)	write64_icc_asgi1r(_v)
410 
/* On AArch32 the DAIF-style flags live in the CPSR. */
411 #define read_daif()		read_cpsr()
412 #define write_daif(flags)	write_cpsr(flags)
413 
414 #define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
415 #define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)
416 
417 #define read_amcntenset0_el0()	read_amcntenset0()
418 #define read_amcntenset1_el0()	read_amcntenset1()
419 
420 /* Helper functions to manipulate CPSR */
/* Unmask IRQs ("cpsie i" clears the CPSR I bit). */
421 static inline void enable_irq(void)
422 {
423 	/*
424 	 * The compiler memory barrier will prevent the compiler from
425 	 * scheduling non-volatile memory access after the write to the
426 	 * register.
427 	 *
428 	 * This could happen if some initialization code issues non-volatile
429 	 * accesses to an area used by an interrupt handler, in the assumption
430 	 * that it is safe as the interrupts are disabled at the time it does
431 	 * that (according to program order). However, non-volatile accesses
432 	 * are not necessarily in program order relatively with volatile inline
433 	 * assembly statements (and volatile accesses).
434 	 */
435 	COMPILER_BARRIER();
436 	__asm__ volatile ("cpsie	i");
	/* Ensure the mask change takes effect before subsequent instructions. */
437 	isb();
438 }
439 
/*
 * The remaining CPSR helpers follow the same pattern as enable_irq() above:
 * a compiler barrier, the cpsie/cpsid instruction for the relevant mask bit
 * (i = IRQ, a = asynchronous abort/SError, f = FIQ), then an isb.
 */
/* Unmask asynchronous aborts (SError). */
440 static inline void enable_serror(void)
441 {
442 	COMPILER_BARRIER();
443 	__asm__ volatile ("cpsie	a");
444 	isb();
445 }
446 
/* Unmask FIQs. */
447 static inline void enable_fiq(void)
448 {
449 	COMPILER_BARRIER();
450 	__asm__ volatile ("cpsie	f");
451 	isb();
452 }
453 
/* Mask IRQs. */
454 static inline void disable_irq(void)
455 {
456 	COMPILER_BARRIER();
457 	__asm__ volatile ("cpsid	i");
458 	isb();
459 }
460 
/* Mask asynchronous aborts (SError). */
461 static inline void disable_serror(void)
462 {
463 	COMPILER_BARRIER();
464 	__asm__ volatile ("cpsid	a");
465 	isb();
466 }
467 
/* Mask FIQs. */
468 static inline void disable_fiq(void)
469 {
470 	COMPILER_BARRIER();
471 	__asm__ volatile ("cpsid	f");
472 	isb();
473 }
474 
475 #endif /* ARCH_HELPERS_H */
476