xref: /OK3568_Linux_fs/kernel/arch/mips/include/asm/asm-eva.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2014 Imagination Technologies Ltd.
 *
 */

#ifndef __ASM_ASM_EVA_H
#define __ASM_ASM_EVA_H

#ifndef __ASSEMBLY__

/* Kernel variants */

#define kernel_cache(op, base)		"cache " op ", " base "\n"
#define kernel_pref(hint, base)		"pref " hint ", " base "\n"
#define kernel_ll(reg, addr)		"ll " reg ", " addr "\n"
#define kernel_sc(reg, addr)		"sc " reg ", " addr "\n"
#define kernel_lw(reg, addr)		"lw " reg ", " addr "\n"
#define kernel_lwl(reg, addr)		"lwl " reg ", " addr "\n"
#define kernel_lwr(reg, addr)		"lwr " reg ", " addr "\n"
#define kernel_lh(reg, addr)		"lh " reg ", " addr "\n"
#define kernel_lb(reg, addr)		"lb " reg ", " addr "\n"
#define kernel_lbu(reg, addr)		"lbu " reg ", " addr "\n"
#define kernel_sw(reg, addr)		"sw " reg ", " addr "\n"
#define kernel_swl(reg, addr)		"swl " reg ", " addr "\n"
#define kernel_swr(reg, addr)		"swr " reg ", " addr "\n"
#define kernel_sh(reg, addr)		"sh " reg ", " addr "\n"
#define kernel_sb(reg, addr)		"sb " reg ", " addr "\n"

#ifdef CONFIG_32BIT
/*
 * There are no 'sd' or 'ld' instructions in 32-bit mode, but the code
 * will do the correct thing.
 */
#define kernel_sd(reg, addr)		user_sw(reg, addr)
#define kernel_ld(reg, addr)		user_lw(reg, addr)
#else
#define kernel_sd(reg, addr)		"sd " reg ", " addr "\n"
#define kernel_ld(reg, addr)		"ld " reg ", " addr "\n"
#endif /* CONFIG_32BIT */

#ifdef CONFIG_EVA

#define __BUILD_EVA_INSN(insn, reg, addr)				\
				"	.set	push\n"			\
				"	.set	mips0\n"		\
				"	.set	eva\n"			\
				"	"insn" "reg", "addr "\n"	\
				"	.set	pop\n"
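
/*
 * __BUILD_EVA_INSN concatenates to a single string literal that brackets one
 * instruction with ".set mips0" (restore the command-line ISA) and ".set eva"
 * (permit EVA opcodes).  For example,
 *
 *	__BUILD_EVA_INSN("lwe", "%0", "0(%1)")
 *
 * yields the string
 *
 *	"	.set	push\n"
 *	"	.set	mips0\n"
 *	"	.set	eva\n"
 *	"	lwe %0, 0(%1)\n"
 *	"	.set	pop\n"
 */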

#define user_cache(op, base)		__BUILD_EVA_INSN("cachee", op, base)
#define user_pref(hint, base)		__BUILD_EVA_INSN("prefe", hint, base)
#define user_ll(reg, addr)		__BUILD_EVA_INSN("lle", reg, addr)
#define user_sc(reg, addr)		__BUILD_EVA_INSN("sce", reg, addr)
#define user_lw(reg, addr)		__BUILD_EVA_INSN("lwe", reg, addr)
#define user_lwl(reg, addr)		__BUILD_EVA_INSN("lwle", reg, addr)
#define user_lwr(reg, addr)		__BUILD_EVA_INSN("lwre", reg, addr)
#define user_lh(reg, addr)		__BUILD_EVA_INSN("lhe", reg, addr)
#define user_lb(reg, addr)		__BUILD_EVA_INSN("lbe", reg, addr)
#define user_lbu(reg, addr)		__BUILD_EVA_INSN("lbue", reg, addr)
/* No 64-bit EVA instruction for loading double words */
#define user_ld(reg, addr)		user_lw(reg, addr)
#define user_sw(reg, addr)		__BUILD_EVA_INSN("swe", reg, addr)
#define user_swl(reg, addr)		__BUILD_EVA_INSN("swle", reg, addr)
#define user_swr(reg, addr)		__BUILD_EVA_INSN("swre", reg, addr)
#define user_sh(reg, addr)		__BUILD_EVA_INSN("she", reg, addr)
#define user_sb(reg, addr)		__BUILD_EVA_INSN("sbe", reg, addr)
/* No 64-bit EVA instruction for storing double words */
#define user_sd(reg, addr)		user_sw(reg, addr)

#else

#define user_cache(op, base)		kernel_cache(op, base)
#define user_pref(hint, base)		kernel_pref(hint, base)
#define user_ll(reg, addr)		kernel_ll(reg, addr)
#define user_sc(reg, addr)		kernel_sc(reg, addr)
#define user_lw(reg, addr)		kernel_lw(reg, addr)
#define user_lwl(reg, addr)		kernel_lwl(reg, addr)
#define user_lwr(reg, addr)		kernel_lwr(reg, addr)
#define user_lh(reg, addr)		kernel_lh(reg, addr)
#define user_lb(reg, addr)		kernel_lb(reg, addr)
#define user_lbu(reg, addr)		kernel_lbu(reg, addr)
#define user_sw(reg, addr)		kernel_sw(reg, addr)
#define user_swl(reg, addr)		kernel_swl(reg, addr)
#define user_swr(reg, addr)		kernel_swr(reg, addr)
#define user_sh(reg, addr)		kernel_sh(reg, addr)
#define user_sb(reg, addr)		kernel_sb(reg, addr)

#ifdef CONFIG_32BIT
#define user_sd(reg, addr)		kernel_sw(reg, addr)
#define user_ld(reg, addr)		kernel_lw(reg, addr)
#else
#define user_sd(reg, addr)		kernel_sd(reg, addr)
#define user_ld(reg, addr)		kernel_ld(reg, addr)
#endif /* CONFIG_32BIT */

#endif /* CONFIG_EVA */
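
/*
 * Usage sketch (illustrative; 'val' and 'uaddr' are hypothetical names): the
 * user_*() and kernel_*() string variants above are meant to be spliced into
 * inline asm, with the register and address operands passed as strings:
 *
 *	__asm__(user_lw("%0", "0(%1)")
 *		: "=r" (val)
 *		: "r" (uaddr));
 *
 * With CONFIG_EVA=y this emits "lwe" (an EVA user-address load); otherwise
 * user_lw() falls back to kernel_lw() and a plain "lw" is emitted.
 */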

#else /* __ASSEMBLY__ */

#define kernel_cache(op, base)		cache op, base
#define kernel_pref(hint, base)		pref hint, base
#define kernel_ll(reg, addr)		ll reg, addr
#define kernel_sc(reg, addr)		sc reg, addr
#define kernel_lw(reg, addr)		lw reg, addr
#define kernel_lwl(reg, addr)		lwl reg, addr
#define kernel_lwr(reg, addr)		lwr reg, addr
#define kernel_lh(reg, addr)		lh reg, addr
#define kernel_lb(reg, addr)		lb reg, addr
#define kernel_lbu(reg, addr)		lbu reg, addr
#define kernel_sw(reg, addr)		sw reg, addr
#define kernel_swl(reg, addr)		swl reg, addr
#define kernel_swr(reg, addr)		swr reg, addr
#define kernel_sh(reg, addr)		sh reg, addr
#define kernel_sb(reg, addr)		sb reg, addr

#ifdef CONFIG_32BIT
/*
 * There are no 'sd' or 'ld' instructions in 32-bit mode, but the code
 * will do the correct thing.
 */
#define kernel_sd(reg, addr)		user_sw(reg, addr)
#define kernel_ld(reg, addr)		user_lw(reg, addr)
#else
#define kernel_sd(reg, addr)		sd reg, addr
#define kernel_ld(reg, addr)		ld reg, addr
#endif /* CONFIG_32BIT */

#ifdef CONFIG_EVA

#define __BUILD_EVA_INSN(insn, reg, addr)			\
				.set	push;			\
				.set	mips0;			\
				.set	eva;			\
				insn reg, addr;			\
				.set	pop;
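
/*
 * In assembly sources the macros expand to real assembler statements rather
 * than strings.  As an illustration (register names are hypothetical),
 * user_lw(t0, 0(a0)) with CONFIG_EVA=y assembles as:
 *
 *	.set	push
 *	.set	mips0
 *	.set	eva
 *	lwe	t0, 0(a0)
 *	.set	pop
 */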

#define user_cache(op, base)		__BUILD_EVA_INSN(cachee, op, base)
#define user_pref(hint, base)		__BUILD_EVA_INSN(prefe, hint, base)
#define user_ll(reg, addr)		__BUILD_EVA_INSN(lle, reg, addr)
#define user_sc(reg, addr)		__BUILD_EVA_INSN(sce, reg, addr)
#define user_lw(reg, addr)		__BUILD_EVA_INSN(lwe, reg, addr)
#define user_lwl(reg, addr)		__BUILD_EVA_INSN(lwle, reg, addr)
#define user_lwr(reg, addr)		__BUILD_EVA_INSN(lwre, reg, addr)
#define user_lh(reg, addr)		__BUILD_EVA_INSN(lhe, reg, addr)
#define user_lb(reg, addr)		__BUILD_EVA_INSN(lbe, reg, addr)
#define user_lbu(reg, addr)		__BUILD_EVA_INSN(lbue, reg, addr)
/* No 64-bit EVA instruction for loading double words */
#define user_ld(reg, addr)		user_lw(reg, addr)
#define user_sw(reg, addr)		__BUILD_EVA_INSN(swe, reg, addr)
#define user_swl(reg, addr)		__BUILD_EVA_INSN(swle, reg, addr)
#define user_swr(reg, addr)		__BUILD_EVA_INSN(swre, reg, addr)
#define user_sh(reg, addr)		__BUILD_EVA_INSN(she, reg, addr)
#define user_sb(reg, addr)		__BUILD_EVA_INSN(sbe, reg, addr)
/* No 64-bit EVA instruction for storing double words */
#define user_sd(reg, addr)		user_sw(reg, addr)

#else

#define user_cache(op, base)		kernel_cache(op, base)
#define user_pref(hint, base)		kernel_pref(hint, base)
#define user_ll(reg, addr)		kernel_ll(reg, addr)
#define user_sc(reg, addr)		kernel_sc(reg, addr)
#define user_lw(reg, addr)		kernel_lw(reg, addr)
#define user_lwl(reg, addr)		kernel_lwl(reg, addr)
#define user_lwr(reg, addr)		kernel_lwr(reg, addr)
#define user_lh(reg, addr)		kernel_lh(reg, addr)
#define user_lb(reg, addr)		kernel_lb(reg, addr)
#define user_lbu(reg, addr)		kernel_lbu(reg, addr)
#define user_sw(reg, addr)		kernel_sw(reg, addr)
#define user_swl(reg, addr)		kernel_swl(reg, addr)
#define user_swr(reg, addr)		kernel_swr(reg, addr)
#define user_sh(reg, addr)		kernel_sh(reg, addr)
#define user_sb(reg, addr)		kernel_sb(reg, addr)

#ifdef CONFIG_32BIT
#define user_sd(reg, addr)		kernel_sw(reg, addr)
#define user_ld(reg, addr)		kernel_lw(reg, addr)
#else
#define user_sd(reg, addr)		kernel_sd(reg, addr)
#define user_ld(reg, addr)		kernel_ld(reg, addr)
#endif /* CONFIG_32BIT */

#endif /* CONFIG_EVA */

#endif /* __ASSEMBLY__ */

#endif /* __ASM_ASM_EVA_H */