xref: /rk3399_ARM-atf/include/arch/aarch32/arch_helpers.h (revision f5478dedf9e096d9539362b38ceb096b940ba3e2)
/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_HELPERS_H
#define ARCH_HELPERS_H

#include <arch.h>
#include <cdefs.h>
#include <stdint.h>
#include <string.h>

/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline void write_## _name(u_register_t v)			\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline u_register_t read_ ## _name(void)				\
{									\
	u_register_t v;							\
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v;							\
}
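
/*
 * Illustrative expansion (sketch, not part of the original header): the
 * stringized operands are pasted into a single MRC/MCR mnemonic. For
 * instance, _DEFINE_COPROCR_READ_FUNC(midr, p15, 0, c0, c0, 0) expands to
 * roughly:
 *
 *	static inline u_register_t read_midr(void)
 *	{
 *		u_register_t v;
 *		__asm__ volatile ("mrc p15,0,%0,c0,c0,0" : "=r" (v));
 *		return v;
 *	}
 */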

/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the 64-bit `mrrc` and `mcrr` accessors below: for a 64-bit
 * operand, %Q names the register holding the low 32 bits and %R the one
 * holding the high 32 bits.
 */

#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm)		\
static inline void write64_## _name(uint64_t v)				\
{									\
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm)		\
static inline uint64_t read64_## _name(void)				\
{									\
	uint64_t v;							\
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v;							\
}
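
/*
 * Illustrative expansion (sketch): assuming CNTPCT_64 is encoded in arch.h
 * as p15, 0, c14, the DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)
 * invocation further down produces roughly:
 *
 *	static inline uint64_t read64_cntpct(void)
 *	{
 *		uint64_t v;
 *		__asm__ volatile ("mrrc p15,0, %Q0, %R0,c14" : "=r" (v));
 *		return v;
 *	}
 *
 * with %Q0/%R0 replaced by the registers holding the low/high words of v.
 */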

#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name)			\
static inline u_register_t read_ ## _name(void)				\
{									\
	u_register_t v;							\
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v));		\
	return v;							\
}

#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)			\
static inline void write_ ## _name(u_register_t v)			\
{									\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v));	\
}

#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name)		\
static inline void write_ ## _name(const u_register_t v)		\
{									\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v));	\
}

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...)				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...)				\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...)				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)			\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...)				\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...)			\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...)				\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)		\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name)					\
	_DEFINE_SYSREG_READ_FUNC(_name, _name)				\
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)
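
/*
 * Usage note (sketch, not part of the original header): each DEFINE_*
 * invocation below instantiates static inline accessors named after its
 * first argument. For example, DEFINE_COPROCR_RW_FUNCS(scr, SCR) later in
 * this file provides read_scr() and write_scr(), which the IS_IN_SECURE()
 * macro near the end of the header relies on.
 */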

/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/

#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void tlbi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void bpi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void tlbi##_op(u_register_t v)				\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...)					\
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...)				\
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...)					\
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void dc##_op(u_register_t v)				\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...)				\
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/* Define function for simple system instruction */
#define DEFINE_SYSOP_FUNC(_op)						\
static inline void _op(void)						\
{									\
	__asm__ (#_op);							\
}

/* Define function for system instruction with type specifier */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type)				\
static inline void _op ## _type(void)					\
{									\
	__asm__ (#_op " " #_type);					\
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type)			\
static inline void _op ## _type(u_register_t v)				\
{									\
	__asm__ (#_op " " #_type ", %0" : : "r" (v));			\
}

void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);

void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);

DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_FUNC(isb)

void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
		 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);

DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)

/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)

#define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					 CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
					 CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					 CNTP_CTL_ISTATUS_MASK)

#define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

#define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
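
/*
 * Usage sketch (assumption, not taken from this file): the get/set/clr
 * helpers above operate on a local copy of the CNTP_CTL value, which the
 * caller then writes back through the corresponding accessor, e.g.:
 *
 *	u_register_t ctl = read_cnthp_ctl();
 *
 *	set_cntp_ctl_enable(ctl);
 *	clr_cntp_ctl_imask(ctl);
 *	write_cnthp_ctl(ctl);
 */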

DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)

DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)

/*
 * Address translation
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)

/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)
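
/*
 * Usage sketch (assumption, not taken from this file): a TLB invalidation
 * is normally followed by a DSB, so that the invalidation completes, and
 * an ISB to synchronise the instruction stream, e.g.:
 *
 *	tlbiallis();
 *	dsbish();
 *	isb();
 */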

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
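
/*
 * Illustrative sketch (assumption, not the actual implementation): the
 * range-based helpers declared earlier (e.g. flush_dcache_range()) are
 * provided in assembly, but an equivalent built on dccivac() could look
 * like the hypothetical helper below, deriving the line size from
 * CTR.DminLine (bits [19:16], log2 of the line length in words):
 *
 *	static inline void flush_dcache_range_sketch(uintptr_t addr, size_t size)
 *	{
 *		size_t line = (size_t)4 << ((read_ctr() >> 16) & 0xfU);
 *		uintptr_t end = addr + size;
 *
 *		for (addr &= ~(uintptr_t)(line - 1U); addr < end; addr += line)
 *			dccivac(addr);
 *		dsbish();
 *	}
 */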

/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define	dmbld()			dmb()
#endif

#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

#define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()	IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
		(IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))

static inline unsigned int get_current_el(void)
{
	if (IS_IN_EL3()) {
		return 3U;
	} else if (IS_IN_EL2()) {
		return 2U;
	} else {
		return 1U;
	}
}

/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)

#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()
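
/*
 * Usage sketch (assumption, not taken from this file): these *_elX aliases
 * let common code written against the AArch64 register names build
 * unchanged on AArch32, e.g. (assuming SCR_NS_BIT from arch.h):
 *
 *	write_scr_el3(read_scr_el3() | SCR_NS_BIT);
 *	isb();
 */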

/* Helper functions to manipulate CPSR */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier prevents the compiler from moving
	 * non-volatile memory accesses past the write to the register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, on the assumption
	 * that this is safe because interrupts are disabled at that point
	 * (according to program order). However, non-volatile accesses are not
	 * necessarily kept in program order relative to volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	i");
	isb();
}
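
/*
 * Illustrative sketch (assumption, hypothetical names): the hazard described
 * in the comment above can arise in code such as:
 *
 *	shared_flag = 1U;	// plain, non-volatile store
 *	enable_irq();		// the handler may now read shared_flag
 *
 * Without COMPILER_BARRIER(), the compiler would be free to sink the store
 * to shared_flag past the volatile `cpsie i` asm statement.
 */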

static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	a");
	isb();
}

static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	f");
	isb();
}

static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	i");
	isb();
}

static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	a");
	isb();
}

static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid	f");
	isb();
}

#endif /* ARCH_HELPERS_H */