xref: /rk3399_ARM-atf/include/arch/aarch32/arch_helpers.h (revision 322107b1148c058a692fc2a759569dd3b49f3962)
1 /*
2  * Copyright (c) 2016-2024, ARM Limited and Contributors. All rights reserved.
3  * Portions copyright (c) 2021-2022, ProvenRun S.A.S. All rights reserved.
4  *
5  * SPDX-License-Identifier: BSD-3-Clause
6  */
7 
8 #ifndef ARCH_HELPERS_H
9 #define ARCH_HELPERS_H
10 
11 #include <assert.h>
12 #include <cdefs.h>
13 #include <stdbool.h>
14 #include <stdint.h>
15 #include <string.h>
16 
17 #include <arch.h>
18 
19 /**********************************************************************
20  * Macros which create inline functions to read or write CPU system
21  * registers
22  *********************************************************************/
23 
/*
 * Emit "static inline void write_<_name>(u_register_t v)" which writes v to
 * the 32-bit coprocessor register selected by (coproc, opc1, CRn, CRm, opc2)
 * using an MCR instruction. The operands are stringized into the mnemonic.
 */
#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline void write_## _name(u_register_t v)			\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
29 
/*
 * Emit "static inline u_register_t read_<_name>(void)" which reads the
 * 32-bit coprocessor register selected by (coproc, opc1, CRn, CRm, opc2)
 * using an MRC instruction and returns its value.
 */
#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline u_register_t read_ ## _name(void)				\
{									\
	u_register_t v;							\
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v;							\
}
37 
/*
 *  The undocumented %Q and %R extended asm operand modifiers are used to
 *  implement the below 64 bit `mrrc` and `mcrr` instructions.
 */
42 
/*
 * Emit "static inline void write64_<_name>(uint64_t v)" which writes a
 * 64-bit value to the coprocessor register pair selected by
 * (coproc, opc1, CRm) using MCRR. %Q0/%R0 select the low/high halves of v.
 */
#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm)		\
static inline void write64_## _name(uint64_t v)				\
{									\
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}
48 
/*
 * Emit "static inline uint64_t read64_<_name>(void)" which reads a 64-bit
 * value from the coprocessor register pair selected by (coproc, opc1, CRm)
 * using MRRC. %Q0/%R0 receive the low/high halves of the result.
 */
#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm)		\
static inline uint64_t read64_## _name(void)				\
{	uint64_t v;							\
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v;							\
}
55 
/*
 * Emit "static inline u_register_t read_<_name>(void)" which reads the
 * special register named _reg_name (e.g. cpsr, spsr) via MRS.
 */
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name)			\
static inline u_register_t read_ ## _name(void)				\
{									\
	u_register_t v;							\
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v));		\
	return v;							\
}
63 
/*
 * Emit "static inline void write_<_name>(u_register_t v)" which writes v
 * to the special register named _reg_name via MSR, from a register operand.
 */
#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)			\
static inline void write_ ## _name(u_register_t v)			\
{									\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v));	\
}
69 
/*
 * Same as _DEFINE_SYSREG_WRITE_FUNC but uses the "i" (immediate) operand
 * constraint, so v must be a compile-time constant expression.
 */
#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name)		\
static inline void write_ ## _name(const u_register_t v)		\
{									\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v));	\
}
75 
/*
 * Public wrappers around the helpers above. Each takes the accessor name
 * followed by the coprocessor encoding operands (forwarded via __VA_ARGS__,
 * which allows the encodings in arch.h to be defined as comma-separated
 * operand lists).
 */

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) 				\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)			\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) 			\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) 			\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)		\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register (MRS/MSR, same name) */
#define DEFINE_SYSREG_RW_FUNCS(_name)					\
	_DEFINE_SYSREG_READ_FUNC(_name, _name)				\
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)
106 
107 /**********************************************************************
108  * Macros to create inline functions for tlbi operations
109  *********************************************************************/
110 
/*
 * Emit "static inline void tlbi<_op>(void)" performing the TLB invalidate
 * operation encoded by (coproc, opc1, CRn, CRm, opc2). The written value is
 * ignored by the operation, so zero is used.
 */
#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void tlbi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Emit "static inline void bpi<_op>(void)" performing the branch predictor
 * invalidate operation encoded by the operands; the source value is ignored.
 */
#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void bpi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Emit "static inline void tlbi<_op>(u_register_t v)" performing a TLB
 * invalidate operation that takes an argument (e.g. an address or ASID).
 */
#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void tlbi##_op(u_register_t v)				\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
130 
131 /* Define function for simple TLBI operation */
132 #define DEFINE_TLBIOP_FUNC(_op, ...)					\
133 	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)
134 
135 /* Define function for TLBI operation with register parameter */
136 #define DEFINE_TLBIOP_PARAM_FUNC(_op, ...)				\
137 	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)
138 
139 /* Define function for simple BPI operation */
140 #define DEFINE_BPIOP_FUNC(_op, ...)					\
141 	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)
142 
143 /**********************************************************************
144  * Macros to create inline functions for DC operations
145  *********************************************************************/
/*
 * Emit "static inline void dc<_op>(u_register_t v)" performing a data cache
 * maintenance operation by virtual address, where v is the address operand.
 */
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void dc##_op(u_register_t v)				\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...)				\
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
155 
156 /**********************************************************************
157  * Macros to create inline functions for system instructions
158  *********************************************************************/
/*
 * Define function for a simple system instruction, e.g. DEFINE_SYSOP_FUNC(wfi)
 * emits "static inline void wfi(void)". The asm has no outputs, so it is
 * implicitly volatile.
 */
#define DEFINE_SYSOP_FUNC(_op)						\
static inline void _op(void)						\
{									\
	__asm__ (#_op);							\
}


/*
 * Define function for a system instruction with a type specifier, e.g.
 * DEFINE_SYSOP_TYPE_FUNC(dsb, sy) emits "static inline void dsbsy(void)".
 * The "memory" clobber stops the compiler reordering memory accesses
 * across the barrier.
 */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type)				\
static inline void _op ## _type(void)					\
{									\
	__asm__ (#_op " " #_type : : : "memory");			\
}

/* Define function for system instruction with type specifier and a
 * register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type)			\
static inline void _op ## _type(u_register_t v)				\
{									\
	 __asm__ (#_op " " #_type ", %0" : : "r" (v));			\
}
180 
/*
 * Cache maintenance helpers implemented in assembly elsewhere.
 * flush = clean + invalidate; ranges are [addr, addr + size).
 */
void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);
bool is_dcache_enabled(void);

/* Data cache maintenance by set/way, to the Level Of Unification
 * Inner Shareable (louis) or for all levels (all); op_type selects the
 * operation. */
void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

/* Disable the secure-world MMU (and optionally the I-cache as well). */
void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);
191 
/* CPU hint instructions: wait-for-interrupt, wait-for-event, send-event */
DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
/* Full-system data synchronization / memory barriers */
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

/* Inner-shareable-domain barriers */
DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
/* Instruction synchronization barrier */
DEFINE_SYSOP_FUNC(isb)
209 
/*
 * Issue an SMC with arguments in r0-r7; does not return to the caller
 * (__dead2). Implemented in assembly elsewhere.
 */
void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);

/* MRS/MSR accessors for the SPSR and CPSR special registers */
DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)
215 
216 /*******************************************************************************
217  * System register accessor prototypes
218  ******************************************************************************/
/* Identification and feature registers (read-only accessors) */
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_mmfr3, ID_MMFR3)
DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
DEFINE_COPROCR_READ_FUNC(id_dfr1, ID_DFR1)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(id_pfr2, ID_PFR2)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
/* 64-bit physical counter value */
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

/* Security, system control and translation registers */
DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
/* TTBR0/TTBR1 have both 32-bit and 64-bit (long-descriptor) accessors */
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
/* Hyp-mode physical timer registers */
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)
260 
/* Extract individual fields from a CNTP_CTL register value */
#define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					 CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
					 CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					 CNTP_CTL_ISTATUS_MASK)

/* Set the ENABLE/IMASK bit in a CNTP_CTL value (modifies x in place) */
#define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

/* Clear the ENABLE/IMASK bit in a CNTP_CTL value (modifies x in place) */
#define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
273 
/* GICv3 CPU interface system register accessors (AArch32 encodings) */
DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
/* SGI generation registers are 64-bit on AArch32 (MCRR/MRRC) */
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_asgi1r, ICC_ASGI1R_EL1_64)

/* Debug and performance monitor control registers */
DEFINE_COPROCR_RW_FUNCS(sdcr, SDCR)
DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(pmcr, PMCR)
296 
297 /*
298  * Address translation
299  */
/* Address translation operations (result is read back through PAR) */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

/* Non-Secure Access Control Register */
DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

/* Coproc registers for 32bit AMU support */
DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
DEFINE_COPROCR_RW_FUNCS(amcr, AMCR)

/* AMU counter enable set/clear registers, groups 0 and 1 */
DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

/* Coproc registers for 64bit AMU support: group 0 event counters */
DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)
326 
327 DEFINE_COPROCR_RW_FUNCS_64(amevcntr10, AMEVCNTR10);
328 DEFINE_COPROCR_RW_FUNCS_64(amevcntr11, AMEVCNTR11);
329 DEFINE_COPROCR_RW_FUNCS_64(amevcntr12, AMEVCNTR12);
330 DEFINE_COPROCR_RW_FUNCS_64(amevcntr13, AMEVCNTR13);
331 DEFINE_COPROCR_RW_FUNCS_64(amevcntr14, AMEVCNTR14);
332 DEFINE_COPROCR_RW_FUNCS_64(amevcntr15, AMEVCNTR15);
333 DEFINE_COPROCR_RW_FUNCS_64(amevcntr16, AMEVCNTR16);
334 DEFINE_COPROCR_RW_FUNCS_64(amevcntr17, AMEVCNTR17);
335 DEFINE_COPROCR_RW_FUNCS_64(amevcntr18, AMEVCNTR18);
336 DEFINE_COPROCR_RW_FUNCS_64(amevcntr19, AMEVCNTR19);
337 DEFINE_COPROCR_RW_FUNCS_64(amevcntr1a, AMEVCNTR1A);
338 DEFINE_COPROCR_RW_FUNCS_64(amevcntr1b, AMEVCNTR1B);
339 DEFINE_COPROCR_RW_FUNCS_64(amevcntr1c, AMEVCNTR1C);
340 DEFINE_COPROCR_RW_FUNCS_64(amevcntr1d, AMEVCNTR1D);
341 DEFINE_COPROCR_RW_FUNCS_64(amevcntr1e, AMEVCNTR1E);
342 DEFINE_COPROCR_RW_FUNCS_64(amevcntr1f, AMEVCNTR1F);
343 
344 /*
345  * TLBI operation prototypes
346  */
/* TLB invalidate: entire TLB, local and inner-shareable variants */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
/* TLB invalidate by modified virtual address (with/without ASID match,
 * inner-shareable and hyp variants) */
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)

/*
 * BPI operation prototypes: invalidate all branch predictors,
 * inner shareable.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes: data cache maintenance by virtual address.
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
/* On affected Cortex-A53 revisions, a clean is upgraded to a
 * clean+invalidate as the errata workaround. */
#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
#else
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
#endif
369 
370 /*
371  * DynamIQ Shared Unit power management
372  */
/* DynamIQ Shared Unit power management and cluster PMU registers */
DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)
DEFINE_COPROCR_RW_FUNCS(clusterpmcr, CLUSTERPMCR)
DEFINE_COPROCR_RW_FUNCS(clusterpmcntenset, CLUSTERPMCNTENSET)
DEFINE_COPROCR_RW_FUNCS(clusterpmccntr, CLUSTERPMCCNTR)
DEFINE_COPROCR_RW_FUNCS(clusterpmovsset, CLUSTERPMOVSSET)
DEFINE_COPROCR_RW_FUNCS(clusterpmovsclr, CLUSTERPMOVSCLR)
DEFINE_COPROCR_RW_FUNCS(clusterpmselr, CLUSTERPMSELR)
DEFINE_COPROCR_RW_FUNCS(clusterpmxevcntr, CLUSTERPMXEVCNTR)
DEFINE_COPROCR_RW_FUNCS(clusterpmxevtyper, CLUSTERPMXEVTYPER)
DEFINE_COPROCR_RW_FUNCS(clusterpmmdcr, CLUSTERPMMDCR)
383 
384 /*
385  * RNDR is AArch64 only, so just provide a placeholder here to make the
386  * linker happy.
387  */
388 static inline u_register_t read_rndr(void)
389 {
390 	assert(1);
391 
392 	return 0;
393 }
394 
/* Previously defined accessor functions with incomplete register names:
 * plain dsb()/dmb() alias the full-system (sy) variants. */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define	dmbld()			dmb()
#endif
403 
/* True when SCR.NS is clear, i.e. the core is in the Secure state */
#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

/* Current AArch32 processor mode, decoded from CPSR.M */
#define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()	IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
		(IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))
415 
/*
 * Map the current AArch32 execution mode onto an exception level number:
 * 3 when in EL3 (monitor mode or secure PL1), 2 when in hyp mode,
 * 1 otherwise.
 */
static inline unsigned int get_current_el(void)
{
	unsigned int el = 1U;

	if (IS_IN_EL3()) {
		el = 3U;
	} else if (IS_IN_EL2()) {
		el = 2U;
	}

	return el;
}
426 
/* Macros for compatibility with AArch64 system registers: map the AArch64
 * register-name spellings used by common code onto the AArch32 accessors
 * defined above. */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

/* SGI generation registers use the 64-bit (MCRR) writers on AArch32 */
#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)
#define write_icc_sgi1r(_v)	write64_icc_sgi1r(_v)
#define write_icc_asgi1r(_v)	write64_icc_asgi1r(_v)

/* On AArch32 the interrupt mask bits live in the CPSR */
#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()

#define read_clusterpmmdcr_el3()	read_clusterpmmdcr()
#define write_clusterpmmdcr_el3(_v)	write_clusterpmmdcr(_v)
462 
463 /* Helper functions to manipulate CPSR */
/* Unmask IRQs by clearing CPSR.I */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier will prevent the compiler from
	 * scheduling non-volatile memory access after the write to the
	 * register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, in the assumption
	 * that it is safe as the interrupts are disabled at the time it does
	 * that (according to program order). However, non-volatile accesses
	 * are not necessarily in program order relatively with volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	i");
	/* Ensure the mask change takes effect before subsequent instructions */
	isb();
}
482 
/* Unmask asynchronous aborts by clearing CPSR.A; see the barrier rationale
 * in enable_irq() above. */
static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	a");
	isb();
}
489 
/* Unmask FIQs by clearing CPSR.F; see the barrier rationale in
 * enable_irq() above. */
static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	f");
	isb();
}
496 
/* Mask IRQs by setting CPSR.I; the ISB ensures the new mask is in effect
 * before any following instruction executes. */
static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	i");
	isb();
}
503 
/* Mask asynchronous aborts by setting CPSR.A */
static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	a");
	isb();
}
510 
/* Mask FIQs by setting CPSR.F */
static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	f");
	isb();
}
517 
518 #endif /* ARCH_HELPERS_H */
519