/*
 * include/asm-xtensa/asmmacro.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2005 Tensilica Inc.
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm/core.h>

/*
 * Some little helpers for loops. Use zero-overhead loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *	   ar	register initialized with the start address
 *	   at	scratch register used by the macro
 *	   size	size, as an immediate value
 *	   incr	increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	   ar	register initialized with the start address
 *	   as	register initialized with the size
 *	   at	scratch register used by the macro
 *	   incr_log2	increment [in log2]
 *	   mask_log2	mask [in log2]
 *	   cond		true condition (used in loop'cond')
 *	   ncond	false condition (used in b'ncond')
 *
 * __loop  as
 *	   restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	   ar	start address (modified)
 *	   as	scratch register used by the __loops/__loopi macros or
 *		end address used by the __loopt macro
 *	   incr	increment
 */

/*
 * loop for given size as immediate
 */

	.macro	__loopi ar, at, size, incr

#if XCHAL_HAVE_LOOPS
		movi	\at, ((\size + \incr - 1) / (\incr))
		loop	\at, 99f
#else
		addi	\at, \ar, \size
		98:
#endif

	.endm

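/*
 * A minimal usage sketch (registers and the size are illustrative, not
 * taken from this file): clear 64 bytes starting at a2, one word per
 * iteration, paired with the __endla macro defined below:
 *
 *	movi	a4, 0
 *	__loopi	a2, a3, 64, 4
 *	s32i	a4, a2, 0
 *	__endla	a2, a3, 4
 */
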
/*
 * loop for given size in register
 */

	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond

#if XCHAL_HAVE_LOOPS
		.ifgt \incr_log2 - 1
			addi	\at, \as, (1 << \incr_log2) - 1
			.ifnc \mask_log2,
				extui	\at, \at, \incr_log2, \mask_log2
			.else
				srli	\at, \at, \incr_log2
			.endif
		.endif
		loop\cond	\at, 99f
#else
		.ifnc \mask_log2,
			extui	\at, \as, \incr_log2, \mask_log2
		.else
			.ifnc \ncond,
				srli	\at, \as, \incr_log2
			.endif
		.endif
		.ifnc \ncond,
			b\ncond	\at, 99f
		.endif
		.ifnc \mask_log2,
			slli	\at, \at, \incr_log2
			add	\at, \ar, \at
		.else
			add	\at, \ar, \as
		.endif
#endif
		98:

	.endm

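/*
 * A hedged usage sketch (registers are illustrative): clear a4 bytes
 * (a4 > 0) starting at a2, one word per iteration; incr_log2 = 2 makes
 * the macro round the byte count up to whole words, and the optional
 * mask_log2/cond/ncond arguments are left empty:
 *
 *	movi	a5, 0
 *	__loops	a2, a4, a6, 2
 *	s32i	a5, a2, 0
 *	__endla	a2, a6, 4
 */
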
/*
 * loop from ar to as
 */

	.macro	__loopt	ar, as, at, incr_log2

#if XCHAL_HAVE_LOOPS
		sub	\at, \as, \ar
		.ifgt	\incr_log2 - 1
			addi	\at, \at, (1 << \incr_log2) - 1
			srli	\at, \at, \incr_log2
		.endif
		loop	\at, 99f
#else
		98:
#endif

	.endm

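/*
 * A minimal sketch (registers are illustrative): walk words from a2 up to
 * the end address in a3, assuming a2 < a3 and both are word-aligned; the
 * end address in a3 doubles as the __endla bound on configurations
 * without zero-overhead loops:
 *
 *	__loopt	a2, a3, a6, 2
 *	l32i	a7, a2, 0
 *	__endla	a2, a3, 4
 */
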
/*
 * restart loop. registers must be unchanged
 */

	.macro	__loop	as

#if XCHAL_HAVE_LOOPS
		loop	\as, 99f
#else
		98:
#endif

	.endm

/*
 * end of loop with no increment of the address.
 */

	.macro	__endl	ar, as
#if !XCHAL_HAVE_LOOPS
		bltu	\ar, \as, 98b
#endif
		99:
	.endm

/*
 * end of loop with increment of the address.
 */

	.macro	__endla	ar, as, incr
		addi	\ar, \ar, \incr
		__endl	\ar \as
	.endm

/* Load or store instructions that may cause exceptions use the EX macro. */

#define EX(handler)				\
	.section __ex_table, "a";		\
	.word	97f, handler;			\
	.previous				\
97:

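/*
 * A hedged sketch of the pattern (the label number and registers are
 * illustrative): EX records the address of the instruction that follows
 * it in __ex_table, together with a fixup handler, so a faulting user
 * access jumps to the handler instead of oopsing:
 *
 *	EX(10f)	l32i	a6, a3, 0	# may fault on a user pointer
 *	...
 *	.section .fixup, "ax"
 *10:	movi	a2, -EFAULT		# report the fault to the caller
 *	...
 */
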
/*
 * Extract unaligned word that is split between two registers w0 and w1
 * into r regardless of machine endianness. SAR must be loaded with the
 * starting bit of the word (see __ssa8).
 */

	.macro __src_b	r, w0, w1
#ifdef __XTENSA_EB__
		src	\r, \w0, \w1
#else
		src	\r, \w1, \w0
#endif
	.endm

/*
 * Load the 2 lowest address bits of r into SAR for __src_b to extract an
 * unaligned word starting at r from two registers loaded from consecutive
 * aligned addresses covering r, regardless of machine endianness.
 *
 *      r   0   1   2   3
 * LE SAR   0   8  16  24
 * BE SAR  32  24  16   8
 */

	.macro __ssa8	r
#ifdef __XTENSA_EB__
		ssa8b	\r
#else
		ssa8l	\r
#endif
	.endm

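/*
 * A minimal sketch combining __ssa8 and __src_b (registers are
 * illustrative): read the unaligned word at a3 via the two aligned words
 * that cover it:
 *
 *	__ssa8	a3		# SAR <- shift for the byte offset of a3
 *	srli	a4, a3, 2
 *	slli	a4, a4, 2	# a4 <- a3 rounded down to a word boundary
 *	l32i	a6, a4, 0	# first aligned word
 *	l32i	a7, a4, 4	# second aligned word
 *	__src_b	a5, a6, a7	# a5 <- unaligned word at a3
 */
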
#define XTENSA_STACK_ALIGNMENT		16

#if defined(__XTENSA_WINDOWED_ABI__)
#define XTENSA_FRAME_SIZE_RESERVE	16
#define XTENSA_SPILL_STACK_RESERVE	32

#define abi_entry(frame_size) \
	entry sp, (XTENSA_FRAME_SIZE_RESERVE + \
		   (((frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		    -XTENSA_STACK_ALIGNMENT))
#define abi_entry_default abi_entry(0)

#define abi_ret(frame_size) retw
#define abi_ret_default retw

#elif defined(__XTENSA_CALL0_ABI__)

#define XTENSA_SPILL_STACK_RESERVE	0

#define abi_entry(frame_size) __abi_entry (frame_size)

	.macro	__abi_entry frame_size
	.ifgt \frame_size
	addi sp, sp, -(((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		       -XTENSA_STACK_ALIGNMENT)
	.endif
	.endm

#define abi_entry_default

#define abi_ret(frame_size) __abi_ret (frame_size)

	.macro	__abi_ret frame_size
	.ifgt \frame_size
	addi sp, sp, (((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		      -XTENSA_STACK_ALIGNMENT)
	.endif
	ret
	.endm

#define abi_ret_default ret

#else
#error Unsupported Xtensa ABI
#endif

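/*
 * A minimal sketch of an ABI-agnostic assembly function using the helpers
 * above (the function name is illustrative; ENTRY/ENDPROC come from
 * <linux/linkage.h>): the same source assembles for both the windowed and
 * the call0 ABI:
 *
 *	ENTRY(my_helper)
 *		abi_entry_default
 *		...
 *		abi_ret_default
 *	ENDPROC(my_helper)
 */
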
#define __XTENSA_HANDLER	.section ".exception.text", "ax"

#endif /* _XTENSA_ASMMACRO_H */