xref: /rk3399_rockchip-uboot/arch/arm/include/asm/macro.h (revision 9563e87b414d247aa89015c100c1fdfec5a7ea29)
1819833afSPeter Tyser /*
2819833afSPeter Tyser  * include/asm-arm/macro.h
3819833afSPeter Tyser  *
4819833afSPeter Tyser  * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
5819833afSPeter Tyser  *
61a459660SWolfgang Denk  * SPDX-License-Identifier:	GPL-2.0+
7819833afSPeter Tyser  */
8819833afSPeter Tyser 
9819833afSPeter Tyser #ifndef __ASM_ARM_MACRO_H__
10819833afSPeter Tyser #define __ASM_ARM_MACRO_H__
11ec6617c3SAlison Wang 
12ec6617c3SAlison Wang #ifdef CONFIG_ARM64
13ec6617c3SAlison Wang #include <asm/system.h>
14ec6617c3SAlison Wang #endif
15ec6617c3SAlison Wang 
16819833afSPeter Tyser #ifdef __ASSEMBLY__
17819833afSPeter Tyser 
18819833afSPeter Tyser /*
19819833afSPeter Tyser  * These macros provide a convenient way to write 8, 16 and 32 bit data
20819833afSPeter Tyser  * to any address.
21819833afSPeter Tyser  * Registers r4 and r5 are used, any data in these registers are
22819833afSPeter Tyser  * overwritten by the macros.
23819833afSPeter Tyser  * The macros are valid for any ARM architecture, they do not implement
24819833afSPeter Tyser  * any memory barriers so caution is recommended when using these when the
25819833afSPeter Tyser  * caches are enabled or on a multi-core system.
26819833afSPeter Tyser  */
27819833afSPeter Tyser 
/*
 * write32: store the 32-bit constant \data at address \addr.
 * Clobbers r4 and r5 (see the note above about barriers/caches).
 */
.macro	write32, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	str	r5, [r4]
.endm
33819833afSPeter Tyser 
/*
 * write16: store the 16-bit constant \data at address \addr.
 * Clobbers r4 and r5.
 *
 * Use plain "ldr" for the constant load: the "=value" literal-pool
 * pseudo-instruction is defined only for "ldr", so "ldrh r5, =\data"
 * is not valid gas syntax.  Only the low halfword of r5 is stored.
 */
.macro	write16, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	strh	r5, [r4]
.endm
39819833afSPeter Tyser 
/*
 * write8: store the 8-bit constant \data at address \addr.
 * Clobbers r4 and r5.
 *
 * Use plain "ldr" for the constant load: the "=value" literal-pool
 * pseudo-instruction is defined only for "ldr", so "ldrb r5, =\data"
 * is not valid gas syntax.  Only the low byte of r5 is stored.
 */
.macro	write8, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	strb	r5, [r4]
.endm
45819833afSPeter Tyser 
/*
 * This macro generates a loop that can be used for delays in the code.
 * Register r4 is used, any data in this register is overwritten by the
 * macro.
 * The macro is valid for any ARM architecture. The actual time spent in the
 * loop will vary from CPU to CPU though.
 */

.macro	wait_timer, time
	ldr	r4, =\time
1:
	nop
	subs	r4, r4, #1	/* carry stays set until r4 wraps below 0 */
	bcs	1b		/* so the body runs \time + 1 times */
.endm
61819833afSPeter Tyser 
620ae76531SDavid Feng #ifdef CONFIG_ARM64
/*
 * Register aliases.
 */
lr	.req	x30		/* link register */
670ae76531SDavid Feng 
/*
 * Branch according to exception level.
 * CurrentEL holds the EL in bits [3:2], so the raw register value is
 * 0xc for EL3, 0x8 for EL2 and 0x4 for EL1; falls through when none
 * match (i.e. EL0).  \xreg is clobbered.
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, 0xc
	b.eq	\el3_label
	cmp	\xreg, 0x8
	b.eq	\el2_label
	cmp	\xreg, 0x4
	b.eq	\el1_label
.endm
800ae76531SDavid Feng 
/*
 * Branch to \a35_label if the current processor is a Cortex-A35 core.
 * Extracts the primary part number (MIDR_EL1 bits [15:4]) and compares
 * it with 0xD04, the Cortex-A35 part number.  \xreg is clobbered.
 */
.macro	branch_if_a35_core, xreg, a35_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD04		/* Cortex-A35 MPCore processor. */
	b.eq	\a35_label
.endm
91*9563e87bSZhihuan He 
92*9563e87bSZhihuan He /*
9337118fb2SBhupesh Sharma  * Branch if current processor is a Cortex-A57 core.
9437118fb2SBhupesh Sharma  */
9537118fb2SBhupesh Sharma .macro	branch_if_a57_core, xreg, a57_label
9637118fb2SBhupesh Sharma 	mrs	\xreg, midr_el1
9737118fb2SBhupesh Sharma 	lsr	\xreg, \xreg, #4
9837118fb2SBhupesh Sharma 	and	\xreg, \xreg, #0x00000FFF
9937118fb2SBhupesh Sharma 	cmp	\xreg, #0xD07		/* Cortex-A57 MPCore processor. */
10037118fb2SBhupesh Sharma 	b.eq	\a57_label
10137118fb2SBhupesh Sharma .endm
10237118fb2SBhupesh Sharma 
/*
 * Branch to \a53_label if the current processor is a Cortex-A53 core.
 * Extracts the primary part number (MIDR_EL1 bits [15:4]) and compares
 * it with 0xD03, the Cortex-A53 part number.  \xreg is clobbered.
 */
.macro	branch_if_a53_core, xreg, a53_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD03		/* Cortex-A53 MPCore processor. */
	b.eq	\a53_label
.endm
11337118fb2SBhupesh Sharma 
/*
 * Branch to \slave_label if the current processor is a slave;
 * the processor with all-zero MPIDR_EL1 affinity is the master.
 * Each affinity field is tested in turn and any non-zero field
 * branches out.  Falls through (treated as master) when
 * CONFIG_ARMV8_MULTIENTRY is not set.  \xreg is clobbered.
 */
.macro	branch_if_slave, xreg, slave_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg, mpidr_el1
	tst	\xreg, #0xff		/* Test Affinity 0 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 1 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 2 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #16	/* Aff3 is bits [39:32]; skip [31:24] */
	tst	\xreg, #0xff		/* Test Affinity 3 */
	b.ne	\slave_label
#endif
.endm
1350ae76531SDavid Feng 
/*
 * Branch to \master_label if the current processor is the master;
 * the processor with all-zero MPIDR_EL1 affinity is the master.
 * Always branches when CONFIG_ARMV8_MULTIENTRY is not set (single
 * entry: every CPU takes the master path).  Clobbers \xreg1, \xreg2.
 */
.macro	branch_if_master, xreg1, xreg2, master_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg1, mpidr_el1
	lsr	\xreg2, \xreg1, #32
	lsl	\xreg2, \xreg2, #32	/* \xreg2 = MPIDR bits [63:32] (Aff3) */
	lsl	\xreg1, \xreg1, #40
	lsr	\xreg1, \xreg1, #40	/* \xreg1 = MPIDR bits [23:0] (Aff0-2) */
	orr	\xreg1, \xreg1, \xreg2	/* combined affinity; [31:24] dropped */
	cbz	\xreg1, \master_label
#else
	b 	\master_label
#endif
.endm
1540ae76531SDavid Feng 
/*
 * Switch from EL3 to EL2 for ARMv8
 * @ep:     kernel entry point
 * @flag:   The execution state flag for lower exception
 *          level, ES_TO_AARCH64 or ES_TO_AARCH32
 * @tmp:    temporary register (clobbered)
 *
 * Must be executed at EL3.  Does not return: ends with an eret to
 * \ep at EL2 (AArch64 EL2h or AArch32 Hyp, depending on \flag).
 *
 * For loading 32-bit OS, x1 is machine nr and x2 is ftaddr.
 * For loading 64-bit OS, x0 is physical address to the FDT blob.
 * They will be passed to the guest.
 */
.macro armv8_switch_to_el2_m, ep, flag, tmp
	msr	cptr_el3, xzr		/* Disable coprocessor traps to EL3 */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */

	/* Initialize Generic Timers: zero the virtual offset */
	msr	cntvoff_el2, xzr

	/* Initialize SCTLR_EL2
	 *
	 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
	 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
	 * EE,WXN,I,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL2_RES1 | SCTLR_EL2_EE_LE |\
			SCTLR_EL2_WXN_DIS | SCTLR_EL2_ICACHE_DIS |\
			SCTLR_EL2_SA_DIS | SCTLR_EL2_DCACHE_DIS |\
			SCTLR_EL2_ALIGN_DIS | SCTLR_EL2_MMU_DIS)
	msr	sctlr_el2, \tmp

	mov	\tmp, sp
	msr	sp_el2, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el3
	msr	vbar_el2, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL2 or AArch32 Hypervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/*
	 * The next lower exception level is AArch64, 64bit EL2 | HCE |
	 * RES1 (Bits[5:4]) | Non-secure EL0/EL1.
	 * and the SMD depends on requirements.
	 * (With PSCI, SMC must stay enabled so the OS can call into
	 * the PSCI handler; otherwise SMC is disabled.)
	 */
#ifdef CONFIG_ARMV8_PSCI
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_RES1 | SCR_EL3_NS_EN)
#else
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
#endif
	msr	scr_el3, \tmp

	/* Return to the EL2_SP2 mode from EL3, D/A/I/F masked */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL2H)
	msr	spsr_el3, \tmp
	msr	elr_el3, \ep
	eret

1:
	/*
	 * The next lower exception level is AArch32, 32bit EL2 | HCE |
	 * SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1.
	 */
	ldr	\tmp, =(SCR_EL3_RW_AARCH32 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
	msr	scr_el3, \tmp

	/* Return to AArch32 Hypervisor mode, A/I/F masked, ARM state */
	ldr     \tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_HYP)
	msr	spsr_el3, \tmp
	msr     elr_el3, \ep
	eret
.endm
23740f8dec5SYork Sun 
/*
 * Switch from EL2 to EL1 for ARMv8
 * @ep:     kernel entry point
 * @flag:   The execution state flag for lower exception
 *          level, ES_TO_AARCH64 or ES_TO_AARCH32
 * @tmp:    temporary register (clobbered)
 *
 * Must be executed at EL2.  Does not return: ends with an eret to
 * \ep at EL1 (AArch64 EL1h or AArch32 SVC, depending on \flag).
 *
 * For loading 32-bit OS, x1 is machine nr and x2 is ftaddr.
 * For loading 64-bit OS, x0 is physical address to the FDT blob.
 * They will be passed to the guest.
 */
.macro armv8_switch_to_el1_m, ep, flag, tmp
	/* Initialize Generic Timers */
	mrs	\tmp, cnthctl_el2
	/* Enable EL1 access to timers */
	orr	\tmp, \tmp, #(CNTHCTL_EL2_EL1PCEN_EN |\
		CNTHCTL_EL2_EL1PCTEN_EN)
	msr	cnthctl_el2, \tmp
	msr	cntvoff_el2, xzr	/* zero the virtual timer offset */

	/* Initialize MPID/MPIDR registers: mirror EL1 IDs for the guest */
	mrs	\tmp, midr_el1
	msr	vpidr_el2, \tmp
	mrs	\tmp, mpidr_el1
	msr	vmpidr_el2, \tmp

	/* Disable coprocessor traps */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable coprocessor traps to EL2 */
	mov	\tmp, #CPACR_EL1_FPEN_EN
	msr	cpacr_el1, \tmp		/* Enable FP/SIMD at EL1 */

	/* SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,EOE,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL1_RES1 | SCTLR_EL1_UCI_DIS |\
			SCTLR_EL1_EE_LE | SCTLR_EL1_WXN_DIS |\
			SCTLR_EL1_NTWE_DIS | SCTLR_EL1_NTWI_DIS |\
			SCTLR_EL1_UCT_DIS | SCTLR_EL1_DZE_DIS |\
			SCTLR_EL1_ICACHE_DIS | SCTLR_EL1_UMA_DIS |\
			SCTLR_EL1_SED_EN | SCTLR_EL1_ITD_EN |\
			SCTLR_EL1_CP15BEN_DIS | SCTLR_EL1_SA0_DIS |\
			SCTLR_EL1_SA_DIS | SCTLR_EL1_DCACHE_DIS |\
			SCTLR_EL1_ALIGN_DIS | SCTLR_EL1_MMU_DIS)
	msr	sctlr_el1, \tmp

	mov	\tmp, sp
	msr	sp_el1, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el2
	msr	vbar_el1, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL1 or AArch32 Supervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/* Initialize HCR_EL2: lower EL is AArch64, HVC disabled */
	ldr	\tmp, =(HCR_EL2_RW_AARCH64 | HCR_EL2_HCD_DIS)
	msr	hcr_el2, \tmp

	/* Return to the EL1_SP1 mode from EL2, D/A/I/F masked */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL1H)
	msr	spsr_el2, \tmp
	msr     elr_el2, \ep
	eret

1:
	/* Initialize HCR_EL2: lower EL is AArch32, HVC disabled */
	ldr	\tmp, =(HCR_EL2_RW_AARCH32 | HCR_EL2_HCD_DIS)
	msr	hcr_el2, \tmp

	/* Return to AArch32 Supervisor mode from EL2, A/I/F masked */
	ldr	\tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_SVC)
	msr     spsr_el2, \tmp
	msr     elr_el2, \ep
	eret
.endm
32440f8dec5SYork Sun 
32540f8dec5SYork Sun #if defined(CONFIG_GICV3)
/*
 * gic_wait_for_interrupt_m (GICv3): wfi until an interrupt with
 * ID 0 (SGI 0) is acknowledged.  Each wakeup acknowledges via
 * ICC_IAR1_EL1, immediately completes it via ICC_EOIR1_EL1, and
 * loops again unless the acknowledged ID was 0.  \xreg1 clobbered.
 */
.macro gic_wait_for_interrupt_m xreg1
0 :	wfi
	mrs     \xreg1, ICC_IAR1_EL1
	msr     ICC_EOIR1_EL1, \xreg1
	cbnz    \xreg1, 0b
.endm
33240f8dec5SYork Sun #elif defined(CONFIG_GICV2)
/*
 * gic_wait_for_interrupt_m (GICv2): wfi until an interrupt with
 * ID 0 (SGI 0) is acknowledged.  \xreg1 holds the GIC CPU interface
 * base address.  Each wakeup acknowledges via GICC_AIAR and writes
 * the full acknowledge value (including the source-CPU bits) back
 * to GICC_AEOIR, then masks to the 10-bit interrupt ID to decide
 * whether to keep waiting.  \wreg2 is clobbered.
 */
.macro gic_wait_for_interrupt_m xreg1, wreg2
0 :	wfi
	ldr     \wreg2, [\xreg1, GICC_AIAR]
	str     \wreg2, [\xreg1, GICC_AEOIR]
	and	\wreg2, \wreg2, #0x3ff	/* interrupt ID field */
	cbnz    \wreg2, 0b
.endm
34040f8dec5SYork Sun #endif
34140f8dec5SYork Sun 
3420ae76531SDavid Feng #endif /* CONFIG_ARM64 */
3430ae76531SDavid Feng 
344819833afSPeter Tyser #endif /* __ASSEMBLY__ */
345819833afSPeter Tyser #endif /* __ASM_ARM_MACRO_H__ */
346